diff --git a/.github/ISSUE_TEMPLATE/01_bug_ai.yml b/.github/ISSUE_TEMPLATE/01_bug_ai.yml deleted file mode 100644 index 36e9036b7840fb536bfc47f1fd9b8359c6736b61..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/01_bug_ai.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Bug Report (AI) -description: Zed Agent Panel Bugs -type: "Bug" -labels: ["ai"] -title: "AI: " -body: - - type: textarea - attributes: - label: Summary - description: Describe the bug with a one line summary, and provide detailed reproduction steps - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - Steps to trigger the problem: - 1. - 2. - 3. - - **Expected Behavior**: - **Actual Behavior**: - - ### Model Provider Details - - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc) - - Model Name: - - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) - - Other Details (MCPs, other settings, etc): - validations: - required: true - - - type: textarea - id: environment - attributes: - label: Zed Version and System Specs - description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' - placeholder: | - Output of "zed: copy system specs into clipboard" - validations: - required: true - - - type: textarea - attributes: - label: If applicable, attach your `Zed.log` file to this issue. - description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. - value: | -
<details><summary>Zed.log</summary> - - - ```log - - ``` - -</details>
- validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/04_bug_debugger.yml b/.github/ISSUE_TEMPLATE/04_bug_debugger.yml deleted file mode 100644 index 8361de5c22fe27a8ea2dd9597fdcbcbf6cd9661e..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/04_bug_debugger.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Bug Report (Debugger) -description: Zed Debugger-Related Bugs -type: "Bug" -labels: ["debugger"] -title: "Debugger:
" -body: - - type: textarea - attributes: - label: Summary - description: Describe the bug with a one line summary, and provide detailed reproduction steps - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - Steps to trigger the problem: - 1. - 2. - 3. - - **Expected Behavior**: - **Actual Behavior**: - - validations: - required: true - - type: textarea - id: environment - attributes: - label: Zed Version and System Specs - description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' - placeholder: | - Output of "zed: copy system specs into clipboard" - validations: - required: true - - - type: textarea - attributes: - label: If applicable, attach your `Zed.log` file to this issue. - description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. - value: | -
<details><summary>Zed.log</summary> - - - ```log - - ``` - -</details>
- validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/06_bug_git.yml b/.github/ISSUE_TEMPLATE/06_bug_git.yml deleted file mode 100644 index ec6276df3a21b8df6fde5e6f49d868dee329d864..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/06_bug_git.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Bug Report (Git) -description: Zed Git Related Bugs -type: "Bug" -labels: ["git"] -title: "Git:
" -body: - - type: textarea - attributes: - label: Summary - description: Describe the bug with a one-line summary, and provide detailed reproduction steps - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - Steps to trigger the problem: - 1. - 2. - 3. - - **Expected Behavior**: - **Actual Behavior**: - - validations: - required: true - - type: textarea - id: environment - attributes: - label: Zed Version and System Specs - description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' - placeholder: | - Output of "zed: copy system specs into clipboard" - validations: - required: true - - - type: textarea - attributes: - label: If applicable, attach your `Zed.log` file to this issue. - description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. - value: | -
<details><summary>Zed.log</summary> - - - ```log - - ``` - -</details>
- validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/07_bug_windows.yml b/.github/ISSUE_TEMPLATE/07_bug_windows.yml deleted file mode 100644 index b4bda930dc81c13224956e0e2cb75ecb26f9e2f5..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/07_bug_windows.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Bug Report (Windows) -description: Zed Windows Related Bugs -type: "Bug" -labels: ["windows"] -title: "Windows:
" -body: - - type: textarea - attributes: - label: Summary - description: Describe the bug with a one-line summary, and provide detailed reproduction steps - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - Steps to trigger the problem: - 1. - 2. - 3. - - **Expected Behavior**: - **Actual Behavior**: - - validations: - required: true - - type: textarea - id: environment - attributes: - label: Zed Version and System Specs - description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' - placeholder: | - Output of "zed: copy system specs into clipboard" - validations: - required: true - - - type: textarea - attributes: - label: If applicable, attach your `Zed.log` file to this issue. - description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. - value: | -
<details><summary>Zed.log</summary> - - - ```log - - ``` - -</details>
- validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index 9f069a7a355188f91d18fc528a69433f214f5167..cae10f02ec3b1bcb024f0d1f1bce0691a39054b4 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -1,67 +1,53 @@ -name: Bug Report (Other) -description: | - Something else is broken in Zed (exclude crashing). -type: "Bug" +name: Report a bug +description: Report a problem with Zed. +type: Bug +labels: "state:needs triage" body: - - type: textarea + - type: markdown attributes: - label: Summary - description: Provide a one sentence summary and detailed reproduction steps value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - - DESCRIPTION_HERE - - Steps to reproduce: - 1. - 2. - 3. - 4. + Is this bug already reported? Upvote to get it noticed faster. [Here's the search](https://github.com/zed-industries/zed/issues). Upvote means giving it a :+1: reaction. - **Expected Behavior**: - **Actual Behavior**: - - + Feature request? Please open in [discussions](https://github.com/zed-industries/zed/discussions/new/choose) instead. + Just have a question or need support? Welcome to [Discord Support Forums](https://discord.com/invite/zedindustries). + - type: textarea + attributes: + label: Reproduction steps + description: A step-by-step description of how to reproduce the bug from a **clean Zed install**. The more context you provide, the easier it is to find and fix the problem fast. + placeholder: | + 1. Start Zed + 2. Click X validations: required: true + - type: textarea + attributes: + label: Current vs. Expected behavior + description: | + Current behavior (screenshots, videos, etc. are appreciated), vs. what you expected the behavior to be. + placeholder: | + Current behavior: The icon is blue. Expected behavior: The icon should be red because this is what the setting is documented to do. + validations: + required: true - type: textarea id: environment attributes: - label: Zed Version and System Specs + label: Zed version and system specs description: | - Open Zed, from the command palette select "zed: copy system specs into clipboard" + Open the command palette in Zed, then type “zed: copy system specs into clipboard”. placeholder: | - Output of "zed: copy system specs into clipboard" + Zed: v0.215.0 (Zed Nightly bfe141ea79aa4984028934067ba75c48d99136ae) + OS: macOS 15.1 + Memory: 36 GiB + Architecture: aarch64 validations: required: true - type: textarea attributes: - label: If applicable, attach your `Zed.log` file to this issue. + label: Attach Zed log file description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. + Open the command palette in Zed, then type `zed: open log` to see the last 1000 lines. Or type `zed: reveal log in file manager` in the command palette to reveal the log file itself. value: |
<details><summary>Zed.log</summary> @@ -73,3 +59,57 @@ body:</details>
validations: required: false + - type: textarea + attributes: + label: Relevant Zed settings + description: | + Open the command palette in Zed, then type “zed: open settings file” and copy/paste any relevant (e.g., LSP-specific) settings. + value: | +
<details><summary>settings.json</summary> + + + ```json + + ``` + +</details>
+ validations: + required: false + - type: textarea + attributes: + label: Relevant Keymap + description: | + Open the command palette in Zed, then type “zed: open keymap file” and copy/paste the file’s contents. + value: | +
<details><summary>keymap.json</summary> + + + ```json + + ``` + +</details>
+ validations: + required: false + - type: textarea + attributes: + label: (for AI issues) Model provider details + placeholder: | + - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.) + - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5) + - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) + - Other details (ACPs, MCPs, other settings, etc.): + validations: + required: false + - type: dropdown + attributes: + label: If you are using WSL on Windows, what flavor of Linux are you using? + multiple: false + options: + - Arch Linux + - Ubuntu + - Fedora + - Mint + - Pop!_OS + - NixOS + - Other diff --git a/.github/ISSUE_TEMPLATE/11_crash_report.yml b/.github/ISSUE_TEMPLATE/11_crash_report.yml index 97979308ae5ab4037c32db2660544c1299f2c750..a019848e874fee709a935a54aa68bae813374628 100644 --- a/.github/ISSUE_TEMPLATE/11_crash_report.yml +++ b/.github/ISSUE_TEMPLATE/11_crash_report.yml @@ -1,42 +1,35 @@ -name: Crash Report -description: Zed is Crashing or Hanging -type: "Crash" +name: Report a crash +description: Zed is crashing or freezing or hanging. +type: Crash +labels: "state:needs triage" body: - type: textarea attributes: - label: Summary - description: Summarize the issue with detailed reproduction steps - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - Steps to trigger the problem: - 1. - 2. - 3. - - Actual Behavior: - Expected Behavior: - + label: Reproduction steps + description: A step-by-step description of how to reproduce the crash from a **clean Zed install**. The more context you provide, the easier it is to find and fix the problem fast. + placeholder: | + 1. Start Zed + 2. Perform an action + 3. Zed crashes validations: required: true - type: textarea - id: environment attributes: - label: Zed Version and System Specs - description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"' + label: Zed version and system specs + description: | + Open the command palette in Zed, then type “zed: copy system specs into clipboard”. placeholder: | - Output of "zed: copy system specs into clipboard" + Zed: v0.215.0 (Zed Nightly bfe141ea79aa4984028934067ba75c48d99136ae) + OS: macOS 15.1 + Memory: 36 GiB + Architecture: aarch64 validations: required: true - type: textarea attributes: - label: If applicable, attach your `Zed.log` file to this issue. + label: Attach Zed log file description: | - From the command palette, run `zed: open log` to see the last 1000 lines. - Or run `zed: reveal log in file manager` to reveal the log file itself. + Open the command palette in Zed, then type `zed: open log` to see the last 1000 lines. Or type `zed: reveal log in file manager` in the command palette to reveal the log file itself. value: |
Zed.log diff --git a/.github/ISSUE_TEMPLATE/99_other.yml b/.github/ISSUE_TEMPLATE/99_other.yml deleted file mode 100644 index 9383a576b1a7d43ea4bb4e02d911891555518532..0000000000000000000000000000000000000000 --- a/.github/ISSUE_TEMPLATE/99_other.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Other [Staff Only] -description: Zed Staff Only -body: - - type: textarea - attributes: - label: Summary - value: | - - SUMMARY_SENTENCE_HERE - - ### Description - - IF YOU DO NOT WORK FOR ZED INDUSTRIES DO NOT CREATE ISSUES WITH THIS TEMPLATE. - THEY WILL BE AUTO-CLOSED AND MAY RESULT IN YOU BEING BANNED FROM THE ZED ISSUE TRACKER. - - FEATURE REQUESTS / SUPPORT REQUESTS SHOULD BE OPENED AS DISCUSSIONS: - https://github.com/zed-industries/zed/discussions/new/choose - validations: - required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3d0b2ce0af79944c9c86dba6187b0fd7d91c5b8c..9bf14ce72d5feb5da9f04bb1064e7351407e6f55 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,9 +1,9 @@ -# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json +# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json blank_issues_enabled: false contact_links: - - name: Feature Request + - name: Feature request url: https://github.com/zed-industries/zed/discussions/new/choose - about: To request a feature, open a new Discussion in one of the appropriate Discussion categories - - name: "Zed Discord" - url: https://zed.dev/community-links - about: Real-time discussion and user support + about: To request a feature, open a new discussion under one of the appropriate categories. + - name: Our Discord community + url: https://discord.com/invite/zedindustries + about: Join our Discord server for real-time discussion and user support. 
diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index 3bc28249f3b8b2a08a48be040177530c5ecfd407..a071aba3a87dcf8e8f48f740115cfddf48b9f805 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -4,10 +4,8 @@ description: "Runs the tests" runs: using: "composite" steps: - - name: Install Rust - shell: bash -euxo pipefail {0} - run: | - cargo install cargo-nextest --locked + - name: Install nextest + uses: taiki-e/install-action@nextest - name: Install Node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 diff --git a/.github/actions/run_tests_windows/action.yml b/.github/actions/run_tests_windows/action.yml index d85d47cb969e22ca3c73c9ab8caca279a9b5ba88..307b73f363b7d5fd7a3c9e5082c4f17d622ec165 100644 --- a/.github/actions/run_tests_windows/action.yml +++ b/.github/actions/run_tests_windows/action.yml @@ -11,9 +11,8 @@ runs: using: "composite" steps: - name: Install test runner - shell: powershell working-directory: ${{ inputs.working-directory }} - run: cargo install cargo-nextest --locked + uses: taiki-e/install-action@nextest - name: Install Node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 diff --git a/.github/workflows/after_release.yml b/.github/workflows/after_release.yml index cf77b429353e59697858434c3904a68b91ecc63a..21b9a8fe0e184773b35752565da6530bb666c6ec 100644 --- a/.github/workflows/after_release.yml +++ b/.github/workflows/after_release.yml @@ -5,13 +5,27 @@ on: release: types: - published + workflow_dispatch: + inputs: + tag_name: + description: tag_name + required: true + type: string + prerelease: + description: prerelease + required: true + type: boolean + body: + description: body + type: string + default: '' jobs: rebuild_releases_page: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: after_release::rebuild_releases_page::refresh_cloud_releases - run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }} + run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name || inputs.tag_name }} shell: bash -euxo pipefail {0} - name: after_release::rebuild_releases_page::redeploy_zed_dev run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev @@ -21,13 +35,13 @@ jobs: post_to_discord: needs: - rebuild_releases_page - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: get-release-url name: after_release::post_to_discord::get_release_url run: | - if [ "${{ github.event.release.prerelease }}" == "true" ]; then + if [ "${{ github.event.release.prerelease || inputs.prerelease }}" == "true" ]; then URL="https://zed.dev/releases/preview" else URL="https://zed.dev/releases/stable" @@ -40,9 +54,9 @@ jobs: uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 with: stringToTruncate: | - 📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released! + 📣 Zed [${{ github.event.release.tag_name || inputs.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released! 
- ${{ github.event.release.body }} + ${{ github.event.release.body || inputs.body }} maxLength: 2000 truncationSymbol: '...' - name: after_release::post_to_discord::discord_webhook_action @@ -56,22 +70,23 @@ jobs: - id: set-package-name name: after_release::publish_winget::set_package_name run: | - if [ "${{ github.event.release.prerelease }}" == "true" ]; then - PACKAGE_NAME=ZedIndustries.Zed.Preview - else - PACKAGE_NAME=ZedIndustries.Zed - fi + if ("${{ github.event.release.prerelease || inputs.prerelease }}" -eq "true") { + $PACKAGE_NAME = "ZedIndustries.Zed.Preview" + } else { + $PACKAGE_NAME = "ZedIndustries.Zed" + } - echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT" - shell: bash -euxo pipefail {0} + echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT + shell: pwsh - name: after_release::publish_winget::winget_releaser uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f with: identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }} + release-tag: ${{ github.event.release.tag_name || inputs.tag_name }} max-versions-to-keep: 5 token: ${{ secrets.WINGET_TOKEN }} create_sentry_release: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo @@ -86,3 +101,19 @@ jobs: SENTRY_ORG: zed-dev SENTRY_PROJECT: zed SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + notify_on_failure: + needs: + - rebuild_releases_page + - post_to_discord + - publish_winget + - create_sentry_release + if: failure() + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: release::notify_on_failure::notify_slack + run: |- + curl -X POST -H 'Content-type: application/json'\ + --data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + shell: bash -euxo pipefail {0} + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml new file mode 100644 index 0000000000000000000000000000000000000000..d3688a722aa107efb3dfb95351404f43c9aece65 --- /dev/null +++ b/.github/workflows/autofix_pr.yml @@ -0,0 +1,128 @@ +# Generated from xtask::workflows::autofix_pr +# Rebuild with `cargo xtask workflows`. 
+name: autofix_pr +run-name: 'autofix PR #${{ inputs.pr_number }}' +on: + workflow_dispatch: + inputs: + pr_number: + description: pr_number + required: true + type: string + run_clippy: + description: run_clippy + type: boolean + default: 'true' +jobs: + run_autofix: + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: autofix_pr::run_autofix::checkout_pr + run: gh pr checkout ${{ inputs.pr_number }} + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} + - name: steps::setup_pnpm + uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 + with: + version: '9' + - name: autofix_pr::run_autofix::run_prettier_fix + run: ./script/prettier --write + shell: bash -euxo pipefail {0} + - name: autofix_pr::run_autofix::run_cargo_fmt + run: cargo fmt --all + shell: bash -euxo pipefail {0} + - name: autofix_pr::run_autofix::run_clippy_fix + if: ${{ inputs.run_clippy }} + run: cargo clippy --workspace --release --all-targets --all-features --fix --allow-dirty --allow-staged + shell: bash -euxo pipefail {0} + - id: create-patch + name: autofix_pr::run_autofix::create_patch + run: | + if git diff --quiet; then + echo "No changes to commit" + echo "has_changes=false" >> "$GITHUB_OUTPUT" + else + git diff > autofix.patch + echo "has_changes=true" >> "$GITHUB_OUTPUT" + fi + shell: bash -euxo pipefail {0} + - name: upload artifact autofix-patch + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: autofix-patch + path: autofix.patch + if-no-files-found: ignore + retention-days: '1' + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + outputs: + has_changes: ${{ steps.create-patch.outputs.has_changes }} + commit_changes: + needs: + - run_autofix + if: needs.run_autofix.outputs.has_changes == 'true' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - id: get-app-token + name: steps::authenticate_as_zippy + uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + with: + app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} + private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: steps::checkout_repo_with_token + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + token: ${{ steps.get-app-token.outputs.token }} + - name: autofix_pr::commit_changes::checkout_pr + run: gh pr checkout ${{ inputs.pr_number }} + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + - name: autofix_pr::download_patch_artifact + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 + with: + name: autofix-patch + - name: autofix_pr::commit_changes::apply_patch + run: git apply autofix.patch + shell: bash -euxo pipefail {0} + - name: autofix_pr::commit_changes::commit_and_push + run: | + git commit -am 
"Autofix" + git push + shell: bash -euxo pipefail {0} + env: + GIT_COMMITTER_NAME: Zed Zippy + GIT_COMMITTER_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: Zed Zippy + GIT_AUTHOR_EMAIL: 234243425+zed-zippy[bot]@users.noreply.github.com + GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} +concurrency: + group: ${{ github.workflow }}-${{ inputs.pr_number }} + cancel-in-progress: true diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index bfaf7a271b5e31b60c999c7dcf8d17538d135355..e1ae890043f31269a9c894f9f8ba408b3db81ffb 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -42,7 +42,7 @@ jobs: exit 1 ;; esac - which cargo-set-version > /dev/null || cargo install cargo-edit + which cargo-set-version > /dev/null || cargo install cargo-edit -f --no-default-features --features "set-version" output="$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')" export GIT_COMMITTER_NAME="Zed Bot" export GIT_COMMITTER_EMAIL="hi@zed.dev" diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml index 69a46558396bd04db9f43e5d401c74d14b07fc88..d4dee5154f2209521f3e9d183c05c118e8861521 100644 --- a/.github/workflows/cherry_pick.yml +++ b/.github/workflows/cherry_pick.yml @@ -1,6 +1,7 @@ # Generated from xtask::workflows::cherry_pick # Rebuild with `cargo xtask workflows`. name: cherry_pick +run-name: 'cherry_pick to ${{ inputs.channel }} #${{ inputs.pr_number }}' on: workflow_dispatch: inputs: @@ -16,6 +17,10 @@ on: description: channel required: true type: string + pr_number: + description: pr_number + required: true + type: string jobs: run_cherry_pick: runs-on: namespace-profile-2x4-ubuntu-2404 @@ -25,7 +30,7 @@ jobs: with: clean: false - id: get-app-token - name: cherry_pick::run_cherry_pick::authenticate_as_zippy + name: steps::authenticate_as_zippy uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} diff --git a/.github/workflows/community_champion_auto_labeler.yml b/.github/workflows/community_champion_auto_labeler.yml index c525bf4738f888b5ca84230982ff1f4f5da2db2f..d73b38320731e0a2f9a52ff863de5095eddb7b6a 100644 --- a/.github/workflows/community_champion_auto_labeler.yml +++ b/.github/workflows/community_champion_auto_labeler.yml @@ -13,13 +13,73 @@ jobs: steps: - name: Check if author is a community champion and apply label uses: actions/github-script@v7 + env: + COMMUNITY_CHAMPIONS: | + 0x2CA + 5brian + 5herlocked + abdelq + afgomez + AidanV + akbxr + AlvaroParker + amtoaer + artemevsevev + bajrangCoder + bcomnes + Be-ing + blopker + bnjjj + bobbymannino + CharlesChen0823 + chbk + cppcoffee + davidbarsky + davewa + ddoemonn + djsauble + errmayank + fantacell + findrakecil + FloppyDisco + gko + huacnlee + imumesh18 + jacobtread + jansol + jeffreyguenther + jenslys + jongretar + lemorage + lnay + marcocondrache + marius851000 + mikebronner + ognevny + playdohface + RemcoSmitsDev + romaninsh + Simek + someone13574 + sourcefrog + suxiaoshao + Takk8IS + thedadams + tidely + timvermeulen + valentinegb + versecafe + vitallium + warrenjokinen + WhySoBad + ya7010 + Zertsov with: script: | - const communityChampionBody = `${{ secrets.COMMUNITY_CHAMPIONS }}`; - - const communityChampions = communityChampionBody + const communityChampions = process.env.COMMUNITY_CHAMPIONS .split('\n') - .map(handle => handle.trim().toLowerCase()); + .map(handle => handle.trim().toLowerCase()) + 
.filter(handle => handle.length > 0); let author; if (context.eventName === 'issues') { diff --git a/.github/workflows/community_close_stale_issues.yml b/.github/workflows/community_close_stale_issues.yml index a38354c31709502d7c35bb43104691c0e63d9f4b..14c1a0a08338ee513a8269094b41ee404beef726 100644 --- a/.github/workflows/community_close_stale_issues.yml +++ b/.github/workflows/community_close_stale_issues.yml @@ -1,7 +1,7 @@ name: "Close Stale Issues" on: schedule: - - cron: "0 7,9,11 * * 3" + - cron: "0 8 31 DEC *" workflow_dispatch: jobs: @@ -15,14 +15,15 @@ jobs: stale-issue-message: > Hi there! 👋 - We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days. + We're working to clean up our issue tracker by closing older bugs that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and it will be kept open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, it will close automatically in 14 days. Thanks for your help! close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue." - days-before-stale: 120 - days-before-close: 7 - any-of-issue-labels: "bug,panic / crash" + days-before-stale: 60 + days-before-close: 14 + only-issue-types: "Bug,Crash" operations-per-run: 1000 ascending: true enable-statistics: true stale-issue-label: "stale" + exempt-issue-labels: "never stale" diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml index 5bcb733f3f21c95e530d7c221df080997dfc24eb..48fc850f8f039d5c25071ba91381ea9f905ab811 100644 --- a/.github/workflows/compare_perf.yml +++ b/.github/workflows/compare_perf.yml @@ -39,8 +39,7 @@ jobs: run: ./script/download-wasi-sdk shell: bash -euxo pipefail {0} - name: compare_perf::run_perf::install_hyperfine - run: cargo install hyperfine - shell: bash -euxo pipefail {0} + uses: taiki-e/install-action@hyperfine - name: steps::git_checkout run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }} shell: bash -euxo pipefail {0} diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 054767e5f1fd86c2a5b8fa2112802e797ec10f6e..9d6054eb3e7546088d29dd9c6316a3494ea6fb17 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -12,7 +12,7 @@ on: - main jobs: danger: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index c61879faa8cd0a5dbdbed03a140f8e558f13322b..ce0c0eac40c8c34992f8838af396e75e6cecc0c8 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -43,9 +43,7 @@ jobs: fetch-depth: 0 - name: Install cargo nextest - shell: bash -euxo pipefail {0} - run: | - cargo install cargo-nextest --locked + uses: taiki-e/install-action@nextest - name: Limit target directory size shell: bash -euxo pipefail {0} diff --git a/.github/workflows/extension_bump.yml 
b/.github/workflows/extension_bump.yml new file mode 100644 index 0000000000000000000000000000000000000000..31676e5c914719a34f8b2e61193475ed107cd2db --- /dev/null +++ b/.github/workflows/extension_bump.yml @@ -0,0 +1,148 @@ +# Generated from xtask::workflows::extension_bump +# Rebuild with `cargo xtask workflows`. +name: extension_bump +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: '1' + CARGO_INCREMENTAL: '0' + ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf +on: + workflow_call: + inputs: + bump-type: + description: bump-type + type: string + default: patch + force-bump: + description: force-bump + required: true + type: boolean + secrets: + app-id: + description: The app ID used to create the PR + required: true + app-secret: + description: The app secret for the corresponding app ID + required: true +jobs: + check_bump_needed: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 0 + - id: compare-versions-check + name: extension_bump::compare_versions + run: | + CURRENT_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)" + PR_PARENT_SHA="${{ github.event.pull_request.head.sha }}" + + if [[ -n "$PR_PARENT_SHA" ]]; then + git checkout "$PR_PARENT_SHA" + elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then + git checkout "$BRANCH_PARENT_SHA" + else + git checkout "$(git log -1 --format=%H)"~1 + fi + + PARENT_COMMIT_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)" + + [[ "$CURRENT_VERSION" == "$PARENT_COMMIT_VERSION" ]] && \ + echo "needs_bump=true" >> "$GITHUB_OUTPUT" || \ + echo "needs_bump=false" >> "$GITHUB_OUTPUT" + + echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" + shell: bash -euxo pipefail {0} + outputs: + needs_bump: ${{ steps.compare-versions-check.outputs.needs_bump }} + current_version: ${{ steps.compare-versions-check.outputs.current_version }} + timeout-minutes: 1 + bump_extension_version: + needs: + - check_bump_needed + if: |- + (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && + (inputs.force-bump == 'true' || needs.check_bump_needed.outputs.needs_bump == 'true') + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - id: generate-token + name: extension_bump::generate_token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.app-id }} + private-key: ${{ secrets.app-secret }} + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: extension_bump::install_bump_2_version + run: pip install bump2version + shell: bash -euxo pipefail {0} + - id: bump-version + name: extension_bump::bump_version + run: | + OLD_VERSION="${{ needs.check_bump_needed.outputs.current_version }}" + + BUMP_FILES=("extension.toml") + if [[ -f "Cargo.toml" ]]; then + BUMP_FILES+=("Cargo.toml") + fi + + bump2version --verbose --current-version "$OLD_VERSION" --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}" + + if [[ -f "Cargo.toml" ]]; then + cargo update --workspace + fi + + NEW_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)" + + echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT" + shell: bash -euxo pipefail {0} + - name: extension_bump::create_pull_request + uses: 
peter-evans/create-pull-request@v7 + with: + title: Bump version to ${{ steps.bump-version.outputs.new_version }} + body: This PR bumps the version of this extension to v${{ steps.bump-version.outputs.new_version }} + commit-message: Bump version to v${{ steps.bump-version.outputs.new_version }} + branch: zed-zippy-autobump + committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> + base: main + delete-branch: true + token: ${{ steps.generate-token.outputs.token }} + sign-commits: true + assignees: ${{ github.actor }} + timeout-minutes: 1 + create_version_label: + needs: + - check_bump_needed + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.check_bump_needed.outputs.needs_bump == 'false' + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - id: generate-token + name: extension_bump::generate_token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.app-id }} + private-key: ${{ secrets.app-secret }} + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: extension_bump::create_version_tag + uses: actions/github-script@v7 + with: + script: |- + github.rest.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: 'refs/tags/v${{ needs.check_bump_needed.outputs.current_version }}', + sha: context.sha + }) + github-token: ${{ steps.generate-token.outputs.token }} + timeout-minutes: 1 +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/extension_release.yml b/.github/workflows/extension_release.yml new file mode 100644 index 0000000000000000000000000000000000000000..5212a79c3e55637aa932be62aea0a626af545a7c --- /dev/null +++ b/.github/workflows/extension_release.yml @@ -0,0 +1,43 @@ +# Generated from xtask::workflows::extension_release +# Rebuild with `cargo xtask workflows`. 
+name: extension_release +on: + workflow_call: + secrets: + app-id: + description: The app ID used to create the PR + required: true + app-secret: + description: The app secret for the corresponding app ID + required: true +jobs: + create_release: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - id: generate-token + name: extension_bump::generate_token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.app-id }} + private-key: ${{ secrets.app-secret }} + owner: zed-industries + repositories: extensions + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - id: get-extension-id + name: extension_release::get_extension_id + run: | + EXTENSION_ID="$(sed -n 's/id = \"\(.*\)\"/\1/p' < extension.toml)" + + echo "extension_id=${EXTENSION_ID}" >> "$GITHUB_OUTPUT" + shell: bash -euxo pipefail {0} + - name: extension_release::release_action + uses: huacnlee/zed-extension-action@v2 + with: + extension-name: ${{ steps.get-extension-id.outputs.extension_id }} + push-to: zed-industries/extensions + env: + COMMITTER_TOKEN: ${{ steps.generate-token.outputs.token }} diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml new file mode 100644 index 0000000000000000000000000000000000000000..9f0917e388c74cffed8f342f7504bc111e6f5147 --- /dev/null +++ b/.github/workflows/extension_tests.yml @@ -0,0 +1,133 @@ +# Generated from xtask::workflows::extension_tests +# Rebuild with `cargo xtask workflows`. +name: extension_tests +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: '1' + CARGO_INCREMENTAL: '0' + ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf +on: + workflow_call: {} +jobs: + orchestrate: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} + - id: filter + name: filter + run: | + if [ -z "$GITHUB_BASE_REF" ]; then + echo "Not in a PR context (i.e., push to main/stable/preview)" + COMPARE_REV="$(git rev-parse HEAD~1)" + else + echo "In a PR context comparing to pull_request.base.ref" + git fetch origin "$GITHUB_BASE_REF" --depth=350 + COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" + fi + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + + check_pattern() { + local output_name="$1" + local pattern="$2" + local grep_arg="$3" + + echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \ + echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \ + echo "${output_name}=false" >> "$GITHUB_OUTPUT" + } + + check_pattern "check_rust" '^(Cargo.lock|Cargo.toml|.*\.rs)$' -qP + check_pattern "check_extension" '^.*\.scm$' -qP + shell: bash -euxo pipefail {0} + outputs: + check_rust: ${{ steps.filter.outputs.check_rust }} + check_extension: ${{ steps.filter.outputs.check_extension }} + check_rust: + needs: + - orchestrate + if: needs.orchestrate.outputs.check_rust == 'true' + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + 
cache: rust + - name: steps::cargo_fmt + run: cargo fmt --all -- --check + shell: bash -euxo pipefail {0} + - name: extension_tests::run_clippy + run: cargo clippy --release --all-targets --all-features -- --deny warnings + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + uses: taiki-e/install-action@nextest + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast + shell: bash -euxo pipefail {0} + env: + NEXTEST_NO_TESTS: warn + timeout-minutes: 3 + check_extension: + needs: + - orchestrate + if: needs.orchestrate.outputs.check_extension == 'true' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - id: cache-zed-extension-cli + name: extension_tests::cache_zed_extension_cli + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 + with: + path: zed-extension + key: zed-extension-${{ env.ZED_EXTENSION_CLI_SHA }} + - name: extension_tests::download_zed_extension_cli + if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true' + run: | + wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" + chmod +x zed-extension + shell: bash -euxo pipefail {0} + - name: extension_tests::check + run: | + mkdir -p /tmp/ext-scratch + mkdir -p /tmp/ext-output + ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + shell: bash -euxo pipefail {0} + timeout-minutes: 2 + tests_pass: + needs: + - orchestrate + - check_rust + - check_extension + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always() + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: run_tests::tests_pass + run: | + set +x + EXIT_CODE=0 + + check_result() { + echo "* $1: $2" + if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi + } + + check_result "orchestrate" "${{ needs.orchestrate.result }}" + check_result "check_rust" "${{ needs.check_rust.result }}" + check_result "check_extension" "${{ needs.check_extension.result }}" + + exit $EXIT_CODE + shell: bash -euxo pipefail {0} +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 25f8b00910f5d64e9319eb40943ae1b5b89d8f28..8cc63340902fb061c66e5896308f2cad9c31f947 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,7 +10,7 @@ on: - v* jobs: run_tests_mac: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: self-mini-macos steps: - name: steps::checkout_repo @@ -29,14 +29,11 @@ jobs: - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: bash -euxo pipefail {0} - name: steps::cleanup_cargo_config if: always() @@ -45,7 +42,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 
run_tests_linux: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo @@ -77,14 +74,19 @@ jobs: - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked + - name: steps::trigger_autofix + if: failure() && github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]' + run: gh workflow run autofix_pr.yml -f pr_number=${{ github.event.pull_request.number }} -f run_clippy=true shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: steps::cargo_install_nextest + uses: taiki-e/install-action@nextest - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: bash -euxo pipefail {0} - name: steps::cleanup_cargo_config if: always() @@ -93,7 +95,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_windows: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo @@ -112,14 +114,11 @@ jobs: - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: pwsh - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than.ps1 250 shell: pwsh - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: pwsh - name: steps::cleanup_cargo_config if: always() @@ -128,7 +127,7 @@ jobs: shell: pwsh timeout-minutes: 60 check_scripts: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo @@ -157,7 +156,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 create_draft_release: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo @@ -479,11 +478,31 @@ jobs: if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') runs-on: namespace-profile-2x4-ubuntu-2404 steps: + - id: get-app-token + name: steps::authenticate_as_zippy + uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 + with: + app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} + private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false shell: bash -euxo pipefail {0} env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} + notify_on_failure: + needs: + - upload_release_assets + - auto_release_preview + if: failure() + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: 
release::notify_on_failure::notify_slack + run: |- + curl -X POST -H 'Content-type: application/json'\ + --data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + shell: bash -euxo pipefail {0} + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 431308bd1cfdf6f4385a8f462edcab8c5769ba5f..d76244175accc3e816cbd7d5dc322d2529a0a236 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -12,7 +12,7 @@ on: - cron: 0 7 * * * jobs: check_style: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: self-mini-macos steps: - name: steps::checkout_repo @@ -28,7 +28,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_windows: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo @@ -47,14 +47,11 @@ jobs: - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: pwsh - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than.ps1 250 shell: pwsh - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: pwsh - name: steps::cleanup_cargo_config if: always() @@ -364,7 +361,7 @@ jobs: needs: - check_style - run_tests_windows - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-32x64-ubuntu-2004 env: ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} @@ -395,7 +392,7 @@ jobs: needs: - check_style - run_tests_windows - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: self-mini-macos env: ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} @@ -437,7 +434,7 @@ jobs: - bundle_mac_x86_64 - bundle_windows_aarch64 - bundle_windows_x86_64 - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-4x8-ubuntu-2204 steps: - name: steps::checkout_repo @@ -493,3 +490,21 @@ jobs: SENTRY_PROJECT: zed SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} timeout-minutes: 60 + notify_on_failure: + needs: + - bundle_linux_aarch64 + - bundle_linux_x86_64 + - bundle_mac_aarch64 + - bundle_mac_x86_64 + - bundle_windows_aarch64 + - bundle_windows_x86_64 + if: failure() + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: release::notify_on_failure::notify_slack + run: |- + curl -X POST -H 'Content-type: application/json'\ + --data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + shell: bash -euxo pipefail {0} + env: + SLACK_WEBHOOK: ${{ 
secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index 1a875aa2c463d264002f14264993b9c99ae1f49c..421d5a1c8003eaa42977339b4ab8e5e0df7ee014 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -6,6 +6,9 @@ env: CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_EVAL_TELEMETRY: '1' MODEL_NAME: ${{ inputs.model_name }} @@ -48,6 +51,11 @@ jobs: - name: run_agent_evals::agent_evals::run_eval run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}" shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} - name: steps::cleanup_cargo_config if: always() run: | diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index ddedd38ebedb647f07e162286365f4e6b95f45a2..f56e56ac7f139926085f33d6e97d3dea6e03a4bb 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -13,7 +13,7 @@ jobs: bundle_linux_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 env: CARGO_INCREMENTAL: 0 @@ -56,7 +56,7 @@ jobs: bundle_linux_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: namespace-profile-32x64-ubuntu-2004 env: CARGO_INCREMENTAL: 0 @@ -99,7 +99,7 @@ jobs: bundle_mac_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-mini-macos env: CARGO_INCREMENTAL: 0 @@ -145,7 +145,7 @@ jobs: bundle_mac_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-mini-macos env: CARGO_INCREMENTAL: 0 @@ -191,7 +191,7 @@ jobs: bundle_windows_aarch64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-32vcpu-windows-2022 env: CARGO_INCREMENTAL: 0 @@ 
-229,7 +229,7 @@ jobs: bundle_windows_x86_64: if: |- (github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || - (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) + (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling')) runs-on: self-32vcpu-windows-2022 env: CARGO_INCREMENTAL: 0 diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..cdfb51cc5b351d1079369aef3abfa845ca7d0428 --- /dev/null +++ b/.github/workflows/run_cron_unit_evals.yml @@ -0,0 +1,77 @@ +# Generated from xtask::workflows::run_cron_unit_evals +# Rebuild with `cargo xtask workflows`. +name: run_cron_unit_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} +on: + schedule: + - cron: 47 1 * * 2 + workflow_dispatch: {} +jobs: + cron_unit_evals: + runs-on: namespace-profile-16x32-ubuntu-2204 + strategy: + matrix: + model: + - anthropic/claude-sonnet-4-5-latest + - anthropic/claude-opus-4-5-latest + - google/gemini-3-pro + - openai/gpt-5 + fail-fast: false + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + uses: taiki-e/install-action@nextest + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 250 + shell: bash -euxo pipefail {0} + - name: ./script/run-unit-evals + run: ./script/run-unit-evals + shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} + ZED_AGENT_MODEL: ${{ matrix.model }} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + - name: run_agent_evals::cron_unit_evals::send_failure_to_slack + if: ${{ failure() }} + uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 + with: + method: chat.postMessage + token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} + payload: | + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index a7d0a145b6d26d964020f48c321556032ae567ed..a9a46b7a797faae793c87601d306a2aea80e6592 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -15,7 +15,7 @@ on: - v[0-9]+.[0-9]+.x jobs: orchestrate: - if: github.repository_owner == 'zed-industries' 
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: steps::checkout_repo @@ -47,7 +47,7 @@ jobs: } check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP - check_pattern "run_docs" '^docs/' -qP + check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP @@ -59,7 +59,7 @@ jobs: run_nix: ${{ steps.filter.outputs.run_nix }} run_tests: ${{ steps.filter.outputs.run_tests }} check_style: - if: github.repository_owner == 'zed-industries' + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-4x8-ubuntu-2204 steps: - name: steps::checkout_repo @@ -77,6 +77,15 @@ jobs: - name: ./script/prettier run: ./script/prettier shell: bash -euxo pipefail {0} + - name: steps::cargo_fmt + run: cargo fmt --all -- --check + shell: bash -euxo pipefail {0} + - name: steps::trigger_autofix + if: failure() && github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]' + run: gh workflow run autofix_pr.yml -f pr_number=${{ github.event.pull_request.number }} -f run_clippy=false + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: ./script/check-todos run: ./script/check-todos shell: bash -euxo pipefail {0} @@ -84,12 +93,9 @@ jobs: run: ./script/check-keymaps shell: bash -euxo pipefail {0} - name: run_tests::check_style::check_for_typos - uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 + uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml - - name: steps::cargo_fmt - run: cargo fmt --all -- --check - shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_windows: needs: @@ -113,14 +119,11 @@ jobs: - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: pwsh - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than.ps1 250 shell: pwsh - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: pwsh - name: steps::cleanup_cargo_config if: always() @@ -163,14 +166,19 @@ jobs: - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked + - name: steps::trigger_autofix + if: failure() && github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]' + run: gh workflow run autofix_pr.yml -f pr_number=${{ github.event.pull_request.number }} -f run_clippy=true shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: steps::cargo_install_nextest + uses: taiki-e/install-action@nextest - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: bash -euxo pipefail {0} - name: steps::cleanup_cargo_config if: always() @@ 
-200,14 +208,11 @@ jobs: - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 300 shell: bash -euxo pipefail {0} - name: steps::cargo_nextest - run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + run: cargo nextest run --workspace --no-fail-fast shell: bash -euxo pipefail {0} - name: steps::cleanup_cargo_config if: always() @@ -500,7 +505,12 @@ jobs: needs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' - runs-on: self-mini-macos + runs-on: namespace-profile-16x32-ubuntu-2204 + env: + GIT_AUTHOR_NAME: Protobuf Action + GIT_AUTHOR_EMAIL: ci@zed.dev + GIT_COMMITTER_NAME: Protobuf Action + GIT_COMMITTER_EMAIL: ci@zed.dev steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -524,6 +534,7 @@ jobs: uses: bufbuild/buf-setup-action@v1 with: version: v1.29.0 + github_token: ${{ secrets.GITHUB_TOKEN }} - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action uses: bufbuild/buf-breaking-action@v1 with: @@ -545,7 +556,7 @@ jobs: - check_scripts - build_nix_linux_x86_64 - build_nix_mac_aarch64 - if: github.repository_owner == 'zed-industries' && always() + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always() runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: run_tests::tests_pass diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index a41b4fb6d7058a97dcd5a98894a0d2c4687ceed4..8f64a5c8bcfd07d56279438795f817bfaa1e2e28 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -1,17 +1,26 @@ -# Generated from xtask::workflows::run_agent_evals +# Generated from xtask::workflows::run_unit_evals # Rebuild with `cargo xtask workflows`. 
-name: run_agent_evals +name: run_unit_evals env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_EVAL_TELEMETRY: '1' + MODEL_NAME: ${{ inputs.model_name }} on: - schedule: - - cron: 47 1 * * 2 - workflow_dispatch: {} + workflow_dispatch: + inputs: + model_name: + description: model_name + required: true + type: string + commit_sha: + description: commit_sha + required: true + type: string jobs: - unit_evals: + run_unit_evals: runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo @@ -37,8 +46,7 @@ jobs: run: ./script/download-wasi-sdk shell: bash -euxo pipefail {0} - name: steps::cargo_install_nextest - run: cargo install cargo-nextest --locked - shell: bash -euxo pipefail {0} + uses: taiki-e/install-action@nextest - name: steps::clear_target_dir_if_large run: ./script/clear-target-dir-if-larger-than 250 shell: bash -euxo pipefail {0} @@ -47,20 +55,15 @@ jobs: shell: bash -euxo pipefail {0} env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - - name: run_agent_evals::unit_evals::send_failure_to_slack - if: ${{ failure() }} - uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 - with: - method: chat.postMessage - token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} - payload: | - channel: C04UDRNNJFQ - text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} + UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }} - name: steps::cleanup_cargo_config if: always() run: | rm -rf ./../.cargo shell: bash -euxo pipefail {0} concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.run_id }} cancel-in-progress: true diff --git a/.gitignore b/.gitignore index 2a91a65b6eaef906681bf3f6e315de07b094c4b1..54faaf1374299ee8f97925a95a93b375c349d707 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ .DS_Store .blob_store .build +.claude/settings.local.json .envrc .flatpak-builder .idea @@ -39,3 +40,6 @@ xcuserdata/ # Don't commit any secrets to the repo. .env .env.secret.toml + +# `nix build` output +/result diff --git a/.mailmap b/.mailmap index db4632d6ca34346d3e8fa289222d7f310b7bdfe5..1e956c52cf76589fc016e1410122ccd94e4818ae 100644 --- a/.mailmap +++ b/.mailmap @@ -141,6 +141,9 @@ Uladzislau Kaminski Uladzislau Kaminski Vitaly Slobodin Vitaly Slobodin +Yara +Yara +Yara Will Bradley Will Bradley WindSoilder diff --git a/.rules b/.rules index 82d15eb9e88299ee7c7fe6c717b2da2646e676a7..7c98c65d7e0eaf3ed0d57898dbd8acee28a220ae 100644 --- a/.rules +++ b/.rules @@ -26,6 +26,12 @@ }); ``` +# Timers in tests + +* In GPUI tests, prefer GPUI executor timers over `smol::Timer::after(...)` when you need timeouts, delays, or to drive `run_until_parked()`: + - Use `cx.background_executor().timer(duration).await` (or `cx.background_executor.timer(duration).await` in `TestAppContext`) so the work is scheduled on GPUI's dispatcher. + - Avoid `smol::Timer::after(...)` for test timeouts when you rely on `run_until_parked()`, because it may not be tracked by GPUI's scheduler and can lead to "nothing left to run" when pumping. + # GPUI GPUI is a UI framework which also provides primitives for state and concurrency management. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9cbac4af2b57f0350fa9f5665e110e0d6e7f6341..f7aceadce18788ae2b8bb9d0fe4b5f16225e70d2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,15 +15,17 @@ with the community to improve the product in ways we haven't thought of (or had In particular we love PRs that are: -- Fixes to existing bugs and issues. -- Small enhancements to existing features, particularly to make them work for more people. +- Fixing or extending the docs. +- Fixing bugs. +- Small enhancements to existing features to make them work for more people (making things work on more platforms/modes/whatever). - Small extra features, like keybindings or actions you miss from other editors or extensions. -- Work towards shipping larger features on our roadmap. +- Part of a Community Program like [Let's Git Together](https://github.com/zed-industries/zed/issues/41541). If you're looking for concrete ideas: -- Our [top-ranking issues](https://github.com/zed-industries/zed/issues/5393) based on votes by the community. -- Our [public roadmap](https://zed.dev/roadmap) contains a rough outline of our near-term priorities for Zed. +- [Curated board of issues](https://github.com/orgs/zed-industries/projects/69) suitable for everyone from first-time contributors to seasoned community champions. +- [Triaged bugs with confirmed steps to reproduce](https://github.com/zed-industries/zed/issues?q=is%3Aissue%20state%3Aopen%20type%3ABug%20label%3Astate%3Areproducible). +- [Area labels](https://github.com/zed-industries/zed/labels?q=area%3A*) to browse bugs in a specific part of the product you care about (after clicking on an area label, add type:Bug to the search). ## Sending changes @@ -37,9 +39,17 @@ like, sorry). Although we will take a look, we tend to only merge about half the PRs that are submitted. If you'd like your PR to have the best chance of being merged: -- Include a clear description of what you're solving, and why it's important to you. -- Include tests. -- If it changes the UI, attach screenshots or screen recordings. +- Make sure the change is **desired**: we're always happy to accept bugfixes, + but features should be confirmed with us first if you aim to avoid wasted + effort. If there isn't already a GitHub issue for your feature with staff + confirmation that we want it, start with a GitHub discussion rather than a PR. +- Include a clear description of **what you're solving**, and why it's important. +- Include **tests**. +- If it changes the UI, attach **screenshots** or screen recordings. +- Make the PR about **one thing only**, e.g. if it's a bugfix, don't add two + features and a refactoring on top of that. +- Keep AI assistance under your judgement and responsibility: it's unlikely + we'll merge a vibe-coded PR that the author doesn't understand. The internal advice for reviewers is as follows: @@ -50,10 +60,9 @@ The internal advice for reviewers is as follows: If you need more feedback from us: the best way is to be responsive to Github comments, or to offer up time to pair with us. -If you are making a larger change, or need advice on how to finish the change -you're making, please open the PR early. We would love to help you get -things right, and it's often easier to see how to solve a problem before the -diff gets too big. +If you need help deciding how to fix a bug, or finish implementing a feature +that we've agreed we want, please open a PR early so we can discuss how to make +the change with code in hand. 
## Things we will (probably) not merge @@ -61,11 +70,11 @@ Although there are few hard and fast rules, typically we don't merge: - Anything that can be provided by an extension. For example a new language, or theme. For adding themes or support for a new language to Zed, check out our [docs on developing extensions](https://zed.dev/docs/extensions/developing-extensions). - New file icons. Zed's default icon theme consists of icons that are hand-designed to fit together in a cohesive manner, please don't submit PRs with off-the-shelf SVGs. +- Features where (in our subjective opinion) the extra complexity isn't worth it for the number of people who will benefit. - Giant refactorings. - Non-trivial changes with no tests. - Stylistic code changes that do not alter any app logic. Reducing allocations, removing `.unwrap()`s, fixing typos is great; making code "more readable" — maybe not so much. -- Features where (in our subjective opinion) the extra complexity isn't worth it for the number of people who will benefit. -- Anything that seems completely AI generated. +- Anything that seems AI-generated without understanding the output. ## Bird's-eye view of Zed diff --git a/Cargo.lock b/Cargo.lock index a3300a818c12f39406cc39848cae86eeb26a0a56..86b551b1895a0fd6747c35c3fcfe3859396665fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,6 +37,7 @@ dependencies = [ "terminal", "ui", "url", + "urlencoding", "util", "uuid", "watch", @@ -103,12 +104,22 @@ dependencies = [ "project", "proto", "release_channel", + "semver", "smallvec", "ui", "util", "workspace", ] +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli 0.31.1", +] + [[package]] name = "addr2line" version = "0.25.1" @@ -158,6 +169,7 @@ dependencies = [ "derive_more 0.99.20", "editor", "env_logger 0.11.8", + "eval_utils", "fs", "futures 0.3.31", "git", @@ -183,7 +195,7 @@ dependencies = [ "regex", "reqwest_client", "rust-embed", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -209,14 +221,14 @@ dependencies = [ "worktree", "zed_env_vars", "zlog", - "zstd 0.11.2+zstd.1.5.2", + "zstd", ] [[package]] name = "agent-client-protocol" -version = "0.7.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525705e39c11cd73f7bc784e3681a9386aa30c8d0630808d3dc2237eb4f9cb1b" +checksum = "c2ffe7d502c1e451aafc5aff655000f84d09c9af681354ac0012527009b1af13" dependencies = [ "agent-client-protocol-schema", "anyhow", @@ -225,22 +237,22 @@ dependencies = [ "derive_more 2.0.1", "futures 0.3.31", "log", - "parking_lot", "serde", "serde_json", ] [[package]] name = "agent-client-protocol-schema" -version = "0.6.2" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecf16c18fea41282d6bbadd1549a06be6836bddb1893f44a6235f340fa24e2af" +checksum = "8af81cc2d5c3f9c04f73db452efd058333735ba9d51c2cf7ef33c9fee038e7e6" dependencies = [ "anyhow", "derive_more 2.0.1", - "schemars 1.0.4", + "schemars", "serde", "serde_json", + "strum 0.27.2", ] [[package]] @@ -289,6 +301,7 @@ dependencies = [ name = "agent_settings" version = "0.1.0" dependencies = [ + "agent-client-protocol", "anyhow", "cloud_llm_client", "collections", @@ -298,7 +311,7 @@ dependencies = [ "language_model", "paths", "project", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -322,10 +335,12 @@ 
dependencies = [ "assistant_slash_command", "assistant_slash_commands", "assistant_text_thread", + "async-fs", "audio", "buffer_diff", "chrono", "client", + "clock", "cloud_llm_client", "collections", "command_palette_hooks", @@ -333,6 +348,7 @@ dependencies = [ "context_server", "db", "editor", + "eval_utils", "extension", "extension_host", "feature_flags", @@ -341,8 +357,10 @@ dependencies = [ "futures 0.3.31", "fuzzy", "gpui", + "gpui_tokio", "html_to_markdown", "http_client", + "image", "indoc", "itertools 0.14.0", "jsonschema", @@ -366,12 +384,13 @@ dependencies = [ "prompt_store", "proto", "rand 0.9.2", - "ref-cast", "release_channel", + "reqwest_client", "rope", "rules_library", - "schemars 1.0.4", + "schemars", "search", + "semver", "serde", "serde_json", "serde_json_lenient", @@ -380,7 +399,6 @@ dependencies = [ "streaming_diff", "task", "telemetry", - "telemetry_events", "terminal", "terminal_view", "text", @@ -392,13 +410,44 @@ dependencies = [ "ui_input", "unindent", "url", - "urlencoding", "util", + "uuid", "watch", "workspace", "zed_actions", ] +[[package]] +name = "agent_ui_v2" +version = "0.1.0" +dependencies = [ + "agent", + "agent_servers", + "agent_settings", + "agent_ui", + "anyhow", + "assistant_text_thread", + "chrono", + "db", + "editor", + "feature_flags", + "fs", + "fuzzy", + "gpui", + "menu", + "project", + "prompt_store", + "serde", + "serde_json", + "settings", + "text", + "time", + "time_format", + "ui", + "util", + "workspace", +] + [[package]] name = "ahash" version = "0.7.8" @@ -625,7 +674,7 @@ dependencies = [ "chrono", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -674,21 +723,6 @@ dependencies = [ "syn 2.0.106", ] -[[package]] -name = "argminmax" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f13d10a41ac8d2ec79ee34178d61e6f47a29c2edfe7ef1721c7383b0359e65" -dependencies = [ - "num-traits", -] - -[[package]] -name = "array-init-cursor" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed51fe0f224d1d4ea768be38c51f9f831dee9d05c163c11fba0b8c44387b1fc3" - [[package]] name = "arraydeque" version = "0.5.1" @@ -842,7 +876,6 @@ dependencies = [ "fs", "futures 0.3.31", "fuzzy", - "globset", "gpui", "html_to_markdown", "http_client", @@ -882,6 +915,7 @@ dependencies = [ "fuzzy", "gpui", "indoc", + "itertools 0.14.0", "language", "language_model", "log", @@ -900,7 +934,7 @@ dependencies = [ "settings", "smallvec", "smol", - "telemetry_events", + "telemetry", "text", "ui", "unindent", @@ -1238,15 +1272,15 @@ dependencies = [ [[package]] name = "async_zip" -version = "0.0.17" +version = "0.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52" +checksum = "0d8c50d65ce1b0e0cb65a785ff615f78860d7754290647d3b983208daa4f85e6" dependencies = [ "async-compression", "crc32fast", "futures-lite 2.6.1", "pin-project", - "thiserror 1.0.69", + "thiserror 2.0.17", ] [[package]] @@ -1271,15 +1305,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "atoi_simd" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a49e05797ca52e312a0c658938b7d00693ef037799ef7187678f212d7684cf" -dependencies = [ - "debug_unsafe", -] - [[package]] name = "atomic" version = "0.5.3" @@ -1341,6 +1366,7 @@ dependencies = [ "parking_lot", "paths", "release_channel", + "semver", "serde", "serde_json", 
"settings", @@ -1376,6 +1402,7 @@ dependencies = [ "http_client", "markdown_preview", "release_channel", + "semver", "serde", "serde_json", "smol", @@ -1414,9 +1441,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.8.8" +version = "1.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37cf2b6af2a95a20e266782b4f76f1a5e12bf412a9db2de9c1e9123b9d8c0ad8" +checksum = "1856b1b48b65f71a4dd940b1c0931f9a7b646d4a924b9828ffefc1454714668a" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1461,6 +1488,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "879b6c89592deb404ba4dc0ae6b58ffd1795c78991cbb5b8bc441c48a070440d" dependencies = [ "aws-lc-sys", + "untrusted 0.7.1", "zeroize", ] @@ -1479,9 +1507,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.12" +version = "1.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa006bb32360ed90ac51203feafb9d02e3d21046e1fd3a450a404b90ea73e5d" +checksum = "9f2402da1a5e16868ba98725e5d73f26b8116eaa892e56f2cd0bf5eec7985f70" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1504,9 +1532,9 @@ dependencies = [ [[package]] name = "aws-sdk-bedrockruntime" -version = "1.109.0" +version = "1.112.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbfdfd941dcb253c17bf70baddbf1e5b22f19e29d313d2e049bad4b1dadb2011" +checksum = "c06c037e6823696d752702ec2bad758d3cf95d1b92b712c8ac7e93824b5e2391" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1586,9 +1614,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.86.0" +version = "1.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a0abbfab841446cce6e87af853a3ba2cc1bc9afcd3f3550dd556c43d434c86d" +checksum = "d05b276777560aa9a196dbba2e3aada4d8006d3d7eeb3ba7fe0c317227d933c4" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1608,9 +1636,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.88.0" +version = "1.90.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a68d675582afea0e94d38b6ca9c5aaae4ca14f1d36faa6edb19b42e687e70d7" +checksum = "f9be14d6d9cd761fac3fd234a0f47f7ed6c0df62d83c0eeb7012750e4732879b" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1630,9 +1658,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.88.0" +version = "1.90.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d30990923f4f675523c51eb1c0dec9b752fb267b36a61e83cbc219c9d86da715" +checksum = "98a862d704c817d865c8740b62d8bbeb5adcb30965e93b471df8a5bcefa20a80" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1653,9 +1681,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.3.5" +version = "1.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffc03068fbb9c8dd5ce1c6fb240678a5cffb86fb2b7b1985c999c4b83c8df68" +checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1712,9 +1740,9 @@ dependencies = [ [[package]] name = "aws-smithy-eventstream" -version = "0.60.12" +version = "0.60.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9656b85088f8d9dc7ad40f9a6c7228e1e8447cdf4b046c87e152e0805dea02fa" +checksum = "e29a304f8319781a39808847efb39561351b1bb76e933da7aa90232673638658" dependencies = [ "aws-smithy-types", "bytes 1.10.1", @@ -1723,9 +1751,9 
@@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.4" +version = "0.62.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3feafd437c763db26aa04e0cc7591185d0961e64c61885bece0fb9d50ceac671" +checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -1733,6 +1761,7 @@ dependencies = [ "bytes 1.10.1", "bytes-utils", "futures-core", + "futures-util", "http 0.2.12", "http 1.3.1", "http-body 0.4.6", @@ -1744,9 +1773,9 @@ dependencies = [ [[package]] name = "aws-smithy-http-client" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1053b5e587e6fa40ce5a79ea27957b04ba660baa02b28b7436f64850152234f1" +checksum = "623254723e8dfd535f566ee7b2381645f8981da086b5c4aa26c0c41582bb1d2c" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1774,9 +1803,9 @@ dependencies = [ [[package]] name = "aws-smithy-json" -version = "0.61.6" +version = "0.61.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff418fc8ec5cadf8173b10125f05c2e7e1d46771406187b2c878557d4503390" +checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" dependencies = [ "aws-smithy-types", ] @@ -1802,9 +1831,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.9.3" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ab99739082da5347660c556689256438defae3bcefd66c52b095905730e404" +checksum = "0bbe9d018d646b96c7be063dd07987849862b0e6d07c778aad7d93d1be6c1ef0" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1826,9 +1855,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.9.1" +version = "1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3683c5b152d2ad753607179ed71988e8cfd52964443b4f74fd8e552d0bbfeb46" +checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1843,9 +1872,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.3" +version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f5b3a7486f6690ba25952cabf1e7d75e34d69eaff5081904a47bc79074d6457" +checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" dependencies = [ "base64-simd", "bytes 1.10.1", @@ -1869,18 +1898,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.11" +version = "0.60.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9c34127e8c624bc2999f3b657e749c1393bedc9cd97b92a804db8ced4d2e163" +checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.9" +version = "1.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2fd329bf0e901ff3f60425691410c69094dc2a1f34b331f37bfc4e9ac1565a1" +checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -1979,7 +2008,7 @@ version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ - "addr2line", + "addr2line 0.25.1", "cfg-if", "libc", "miniz_oxide", @@ -2030,7 +2059,7 @@ dependencies = [ 
"aws-sdk-bedrockruntime", "aws-smithy-types", "futures 0.3.31", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "strum 0.27.2", @@ -2060,26 +2089,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "bincode_derive", - "serde", - "unty", -] - -[[package]] -name = "bincode_derive" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" -dependencies = [ - "virtue", -] - [[package]] name = "bindgen" version = "0.71.1" @@ -2120,30 +2129,15 @@ dependencies = [ "syn 2.0.106", ] -[[package]] -name = "bit-set" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" -dependencies = [ - "bit-vec 0.6.3", -] - [[package]] name = "bit-set" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ - "bit-vec 0.8.0", + "bit-vec", ] -[[package]] -name = "bit-vec" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" - [[package]] name = "bit-vec" version = "0.8.0" @@ -2247,19 +2241,6 @@ dependencies = [ "profiling", ] -[[package]] -name = "blake3" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq 0.3.1", -] - [[package]] name = "block" version = "0.1.6" @@ -2322,9 +2303,9 @@ dependencies = [ [[package]] name = "borrow-or-share" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eeab4423108c5d7c744f4d234de88d18d636100093ae04caf4825134b9c3a32" +checksum = "dc0b364ead1874514c8c2855ab558056ebfeb775653e7ae45ff72f28f8f3166c" [[package]] name = "borsh" @@ -2349,12 +2330,6 @@ dependencies = [ "syn 2.0.106", ] -[[package]] -name = "boxcar" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f64beae40a84da1b4b26ff2761a5b895c12adc41dc25aaee1c4f2bbfe97a6e" - [[package]] name = "breadcrumbs" version = "0.1.0" @@ -2417,6 +2392,7 @@ dependencies = [ "rand 0.9.2", "rope", "serde_json", + "settings", "sum_tree", "text", "unindent", @@ -2520,9 +2496,6 @@ name = "bytes" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" -dependencies = [ - "serde", -] [[package]] name = "bytes-utils" @@ -2575,7 +2548,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9225bdcf4e4a9a4c08bf16607908eb2fbf746828d5e0b5e019726dbf6571f201" dependencies = [ - "darling 0.20.11", + "darling", "proc-macro2", "quote", "syn 2.0.106", @@ -2614,26 +2587,24 @@ dependencies = [ [[package]] name = "calloop" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b99da2f8558ca23c71f4fd15dc57c906239752dd27ff3c00a1d56b685b7cbfec" +version = "0.14.3" +source = 
"git+https://github.com/zed-industries/calloop#eb6b4fd17b9af5ecc226546bdd04185391b3e265" dependencies = [ "bitflags 2.9.4", - "log", "polling", - "rustix 0.38.44", + "rustix 1.1.2", "slab", - "thiserror 1.0.69", + "tracing", ] [[package]] name = "calloop-wayland-source" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95a66a987056935f7efce4ab5668920b5d0dac4a7c99991a67395f13702ddd20" +checksum = "138efcf0940a02ebf0cc8d1eff41a1682a46b431630f4c52450d6265876021fa" dependencies = [ "calloop", - "rustix 0.38.44", + "rustix 1.1.2", "wayland-backend", "wayland-client", ] @@ -2811,15 +2782,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - [[package]] name = "cbc" version = "0.1.2" @@ -2836,7 +2798,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eadd868a2ce9ca38de7eeafdcec9c7065ef89b42b32f0839278d55f35c54d1ff" dependencies = [ "heck 0.4.1", - "indexmap 2.11.4", + "indexmap", "log", "proc-macro2", "quote", @@ -2849,9 +2811,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.41" +version = "1.2.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7" +checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215" dependencies = [ "find-msvc-tools", "jobserver", @@ -2927,12 +2889,24 @@ dependencies = [ "postage", "release_channel", "rpc", + "semver", "settings", "text", "time", "util", ] +[[package]] +name = "chardetng" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea" +dependencies = [ + "cfg-if", + "encoding_rs", + "memchr", +] + [[package]] name = "chrono" version = "0.4.42" @@ -2947,16 +2921,6 @@ dependencies = [ "windows-link 0.2.1", ] -[[package]] -name = "chrono-tz" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" -dependencies = [ - "chrono", - "phf 0.12.1", -] - [[package]] name = "chunked_transfer" version = "1.5.0" @@ -3087,6 +3051,7 @@ dependencies = [ "rayon", "release_channel", "serde", + "serde_json", "tempfile", "util", "windows 0.61.3", @@ -3124,6 +3089,7 @@ dependencies = [ "release_channel", "rpc", "rustls-pki-types", + "semver", "serde", "serde_json", "serde_urlencoded", @@ -3197,25 +3163,11 @@ dependencies = [ "uuid", ] -[[package]] -name = "cloud_zeta2_prompt" -version = "0.1.0" -dependencies = [ - "anyhow", - "cloud_llm_client", - "indoc", - "ordered-float 2.10.1", - "rustc-hash 2.1.1", - "schemars 1.0.4", - "serde", - "strum 0.27.2", -] - [[package]] name = "cmake" -version = "0.1.54" +version = "0.1.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +checksum = "b042e5d8a74ae91bb0961acd039822472ec99f8ab0948cbf6d1369588f8be586" dependencies = [ "cc", ] @@ -3316,8 +3268,8 @@ name = "codestral" version = "0.1.0" dependencies = [ "anyhow", - "edit_prediction", "edit_prediction_context", + 
"edit_prediction_types", "futures 0.3.31", "gpui", "http_client", @@ -3407,7 +3359,6 @@ dependencies = [ "scrypt", "sea-orm", "sea-orm-macros", - "semantic_version", "semver", "serde", "serde_json", @@ -3482,7 +3433,7 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "indexmap 2.11.4", + "indexmap", "rustc-hash 2.1.1", ] @@ -3508,17 +3459,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "comfy-table" -version = "7.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03b7db8e0b4b2fdad6c551e634134e99ec000e5c8c3b6856c65e8bbaded7a3b" -dependencies = [ - "crossterm", - "unicode-segmentation", - "unicode-width", -] - [[package]] name = "command-fds" version = "0.3.2" @@ -3572,21 +3512,6 @@ dependencies = [ "workspace", ] -[[package]] -name = "compact_str" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" -dependencies = [ - "castaway", - "cfg-if", - "itoa", - "rustversion", - "ryu", - "serde", - "static_assertions", -] - [[package]] name = "component" version = "0.1.0" @@ -3667,16 +3592,30 @@ dependencies = [ ] [[package]] -name = "constant_time_eq" -version = "0.1.5" +name = "const_format" +version = "0.2.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] [[package]] name = "constant_time_eq" -version = "0.3.1" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "context_server" @@ -3687,16 +3626,19 @@ dependencies = [ "collections", "futures 0.3.31", "gpui", + "http_client", "log", "net", "parking_lot", "postage", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", + "slotmap", "smol", "tempfile", + "terminal", "url", "util", ] @@ -3729,7 +3671,7 @@ dependencies = [ "command_palette_hooks", "ctor", "dirs 4.0.0", - "edit_prediction", + "edit_prediction_types", "editor", "fs", "futures 0.3.31", @@ -3754,6 +3696,7 @@ dependencies = [ "task", "theme", "ui", + "url", "util", "workspace", "zlog", @@ -4004,20 +3947,38 @@ dependencies = [ "libc", ] +[[package]] +name = "cranelift-assembler-x64" +version = "0.120.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5023e06632d8f351c2891793ccccfe4aef957954904392434038745fb6f1f68" +dependencies = [ + "cranelift-assembler-x64-meta", +] + +[[package]] +name = "cranelift-assembler-x64-meta" +version = "0.120.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c4012b4c8c1f6eb05c0a0a540e3e1ee992631af51aa2bbb3e712903ce4fd65" +dependencies = [ + "cranelift-srcgen", +] + [[package]] name = "cranelift-bforest" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e15d04a0ce86cb36ead88ad68cf693ffd6cda47052b9e0ac114bc47fd9cd23c4" +checksum = 
"4d6d883b4942ef3a7104096b8bc6f2d1a41393f159ac8de12aed27b25d67f895" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c6e3969a7ce267259ce244b7867c5d3bc9e65b0a87e81039588dfdeaede9f34" +checksum = "db7b2ee9eec6ca8a716d900d5264d678fb2c290c58c46c8da7f94ee268175d17" dependencies = [ "serde", "serde_derive", @@ -4025,11 +3986,12 @@ dependencies = [ [[package]] name = "cranelift-codegen" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c22032c4cb42558371cf516bb47f26cdad1819d3475c133e93c49f50ebf304e" +checksum = "aeda0892577afdce1ac2e9a983a55f8c5b87a59334e1f79d8f735a2d7ba4f4b4" dependencies = [ "bumpalo", + "cranelift-assembler-x64", "cranelift-bforest", "cranelift-bitset", "cranelift-codegen-meta", @@ -4038,9 +4000,10 @@ dependencies = [ "cranelift-entity", "cranelift-isle", "gimli 0.31.1", - "hashbrown 0.14.5", + "hashbrown 0.15.5", "log", "postcard", + "pulley-interpreter", "regalloc2", "rustc-hash 2.1.1", "serde", @@ -4052,33 +4015,36 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c904bc71c61b27fc57827f4a1379f29de64fe95653b620a3db77d59655eee0b8" +checksum = "e461480d87f920c2787422463313326f67664e68108c14788ba1676f5edfcd15" dependencies = [ + "cranelift-assembler-x64-meta", "cranelift-codegen-shared", + "cranelift-srcgen", + "pulley-interpreter", ] [[package]] name = "cranelift-codegen-shared" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40180f5497572f644ce88c255480981ae2ec1d7bb4d8e0c0136a13b87a2f2ceb" +checksum = "976584d09f200c6c84c4b9ff7af64fc9ad0cb64dffa5780991edd3fe143a30a1" [[package]] name = "cranelift-control" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d132c6d0bd8a489563472afc171759da0707804a65ece7ceb15a8c6d7dd5ef" +checksum = "46d43d70f4e17c545aa88dbf4c84d4200755d27c6e3272ebe4de65802fa6a955" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d0d9618275474fbf679dd018ac6e009acbd6ae6850f6a67be33fb3b00b323" +checksum = "d75418674520cb400c8772bfd6e11a62736c78fc1b6e418195696841d1bf91f1" dependencies = [ "cranelift-bitset", "serde", @@ -4087,9 +4053,9 @@ dependencies = [ [[package]] name = "cranelift-frontend" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fac41e16729107393174b0c9e3730fb072866100e1e64e80a1a963b2e484d57" +checksum = "3c8b1a91c86687a344f3c52dd6dfb6e50db0dfa7f2e9c7711b060b3623e1fdeb" dependencies = [ "cranelift-codegen", "log", @@ -4099,21 +4065,27 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca20d576e5070044d0a72a9effc2deacf4d6aa650403189d8ea50126483944d" +checksum = "711baa4e3432d4129295b39ec2b4040cc1b558874ba0a37d08e832e857db7285" [[package]] name = "cranelift-native" -version = "0.116.1" +version = "0.120.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b8dee82f3f1f2c4cba9177f1cc5e350fe98764379bcd29340caa7b01f85076c7" +checksum = "41c83e8666e3bcc5ffeaf6f01f356f0e1f9dcd69ce5511a1efd7ca5722001a3f" dependencies = [ "cranelift-codegen", "libc", "target-lexicon 0.13.3", ] +[[package]] +name = "cranelift-srcgen" +version = "0.120.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e3f4d783a55c64266d17dc67d2708852235732a100fc40dd9f1051adc64d7b" + [[package]] name = "crash-context" version = "0.6.3" @@ -4142,7 +4114,7 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "bincode 1.3.3", + "bincode", "cfg-if", "crash-handler", "extension_host", @@ -4155,7 +4127,8 @@ dependencies = [ "serde_json", "smol", "system_specs", - "zstd 0.11.2+zstd.1.5.2", + "windows 0.61.3", + "zstd", ] [[package]] @@ -4301,31 +4274,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] -name = "crossterm" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" -dependencies = [ - "bitflags 2.9.4", - "crossterm_winapi", - "document-features", - "parking_lot", - "rustix 1.1.2", - "winapi", -] - -[[package]] -name = "crossterm_winapi" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" -dependencies = [ - "winapi", -] - -[[package]] -name = "crunchy" -version = "0.2.4" +name = "crunchy" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" @@ -4441,7 +4391,7 @@ checksum = "d74b6bcf49ebbd91f1b1875b706ea46545032a14003b5557b7dfa4bbeba6766e" dependencies = [ "cc", "codespan-reporting 0.13.0", - "indexmap 2.11.4", + "indexmap", "proc-macro2", "quote", "scratch", @@ -4456,7 +4406,7 @@ checksum = "94ca2ad69673c4b35585edfa379617ac364bccd0ba0adf319811ba3a74ffa48a" dependencies = [ "clap", "codespan-reporting 0.13.0", - "indexmap 2.11.4", + "indexmap", "proc-macro2", "quote", "syn 2.0.106", @@ -4474,7 +4424,7 @@ version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a8ebf0b6138325af3ec73324cb3a48b64d57721f17291b151206782e61f66cd" dependencies = [ - "indexmap 2.11.4", + "indexmap", "proc-macro2", "quote", "syn 2.0.106", @@ -4503,7 +4453,7 @@ dependencies = [ "parking_lot", "paths", "proto", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -4522,7 +4472,7 @@ name = "dap-types" version = "0.0.1" source = "git+https://github.com/zed-industries/dap-types?rev=1b461b310481d01e02b2603c16d7144b926339f8#1b461b310481d01e02b2603c16d7144b926339f8" dependencies = [ - "schemars 1.0.4", + "schemars", "serde", "serde_json", ] @@ -4559,18 +4509,8 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", + "darling_core", + "darling_macro", ] [[package]] @@ -4587,38 +4527,13 @@ dependencies = [ "syn 2.0.106", ] 
-[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.106", -] - [[package]] name = "darling_macro" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core 0.20.11", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", + "darling_core", "quote", "syn 2.0.106", ] @@ -4712,12 +4627,6 @@ dependencies = [ "util", ] -[[package]] -name = "debug_unsafe" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85d3cef41d236720ed453e102153a53e4cc3d2fde848c0078a50cf249e8e3e5b" - [[package]] name = "debugger_tools" version = "0.1.0" @@ -4750,6 +4659,7 @@ dependencies = [ "db", "debugger_tools", "editor", + "feature_flags", "file_icons", "futures 0.3.31", "fuzzy", @@ -4768,7 +4678,7 @@ dependencies = [ "pretty_assertions", "project", "rpc", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -4784,6 +4694,7 @@ dependencies = [ "tree-sitter-go", "tree-sitter-json", "ui", + "ui_input", "unindent", "util", "workspace", @@ -4807,7 +4718,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", ] @@ -4922,7 +4833,7 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9" dependencies = [ - "darling 0.20.11", + "darling", "proc-macro2", "quote", "syn 2.0.106", @@ -5124,15 +5035,6 @@ dependencies = [ "zlog", ] -[[package]] -name = "document-features" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" -dependencies = [ - "litrs", -] - [[package]] name = "documented" version = "0.9.2" @@ -5282,41 +5184,105 @@ dependencies = [ name = "edit_prediction" version = "0.1.0" dependencies = [ + "ai_onboarding", + "anyhow", + "arrayvec", + "brotli", "client", + "clock", + "cloud_api_types", + "cloud_llm_client", + "collections", + "copilot", + "ctor", + "db", + "edit_prediction_context", + "edit_prediction_types", + "feature_flags", + "fs", + "futures 0.3.31", "gpui", + "indoc", + "itertools 0.14.0", "language", + "language_model", + "log", + "lsp", + "menu", + "open_ai", + "parking_lot", + "postage", + "pretty_assertions", + "project", + "pulldown-cmark 0.12.2", + "rand 0.9.2", + "regex", + "release_channel", + "semver", + "serde", + "serde_json", + "settings", + "strum 0.27.2", + "telemetry", + "telemetry_events", + "thiserror 2.0.17", + "ui", + "util", + "uuid", + "workspace", + "worktree", + "zed_actions", + "zeta_prompt", + "zlog", ] [[package]] -name = "edit_prediction_button" +name = "edit_prediction_cli" version = "0.1.0" dependencies = [ + "anthropic", "anyhow", + "chrono", + "clap", "client", "cloud_llm_client", - "codestral", - "copilot", + "collections", + "debug_adapter_extension", + "dirs 4.0.0", "edit_prediction", - "editor", - "feature_flags", + "extension", 
"fs", "futures 0.3.31", "gpui", + "gpui_tokio", + "http_client", "indoc", "language", - "lsp", + "language_extension", + "language_model", + "language_models", + "languages", + "libc", + "log", + "node_runtime", "paths", + "pretty_assertions", "project", - "regex", + "prompt_store", + "release_channel", + "reqwest_client", + "serde", "serde_json", "settings", - "supermaven", - "telemetry", - "theme", - "ui", - "workspace", - "zed_actions", - "zeta", + "shellexpand 2.1.2", + "smol", + "sqlez", + "sqlez_macros", + "terminal_view", + "util", + "wasmtime", + "watch", + "zeta_prompt", ] [[package]] @@ -5324,36 +5290,82 @@ name = "edit_prediction_context" version = "0.1.0" dependencies = [ "anyhow", - "arrayvec", - "clap", "cloud_llm_client", "collections", + "env_logger 0.11.8", "futures 0.3.31", "gpui", - "hashbrown 0.15.5", "indoc", - "itertools 0.14.0", "language", "log", - "ordered-float 2.10.1", - "postage", + "lsp", + "parking_lot", "pretty_assertions", "project", - "regex", "serde", "serde_json", "settings", - "slotmap", - "strum 0.27.2", + "smallvec", "text", "tree-sitter", - "tree-sitter-c", - "tree-sitter-cpp", - "tree-sitter-go", "util", + "zeta_prompt", "zlog", ] +[[package]] +name = "edit_prediction_types" +version = "0.1.0" +dependencies = [ + "client", + "gpui", + "language", + "text", +] + +[[package]] +name = "edit_prediction_ui" +version = "0.1.0" +dependencies = [ + "anyhow", + "buffer_diff", + "client", + "cloud_llm_client", + "codestral", + "command_palette_hooks", + "copilot", + "edit_prediction", + "edit_prediction_types", + "editor", + "feature_flags", + "fs", + "futures 0.3.31", + "git", + "gpui", + "indoc", + "language", + "log", + "lsp", + "markdown", + "menu", + "multi_buffer", + "paths", + "project", + "regex", + "serde_json", + "settings", + "supermaven", + "telemetry", + "text", + "theme", + "time", + "ui", + "util", + "workspace", + "zed_actions", + "zeta_prompt", +] + [[package]] name = "editor" version = "0.1.0" @@ -5370,8 +5382,9 @@ dependencies = [ "ctor", "dap", "db", - "edit_prediction", + "edit_prediction_types", "emojis", + "feature_flags", "file_icons", "fs", "futures 0.3.31", @@ -5398,7 +5411,8 @@ dependencies = [ "release_channel", "rope", "rpc", - "schemars 1.0.4", + "schemars", + "semver", "serde", "serde_json", "settings", @@ -5412,9 +5426,11 @@ dependencies = [ "text", "theme", "time", + "tracing", "tree-sitter-bash", "tree-sitter-c", "tree-sitter-html", + "tree-sitter-md", "tree-sitter-python", "tree-sitter-rust", "tree-sitter-typescript", @@ -5430,6 +5446,7 @@ dependencies = [ "workspace", "zed_actions", "zlog", + "ztracing", ] [[package]] @@ -5705,12 +5722,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "ethnum" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca81e6b4777c89fd810c25a4be2b1bd93ea034fbe58e6a75216a34c6b82c539b" - [[package]] name = "euclid" version = "0.22.11" @@ -5775,6 +5786,15 @@ dependencies = [ "watch", ] +[[package]] +name = "eval_utils" +version = "0.1.0" +dependencies = [ + "gpui", + "serde", + "smol", +] + [[package]] name = "event-listener" version = "2.5.3" @@ -5855,15 +5875,18 @@ dependencies = [ "gpui", "heck 0.5.0", "http_client", + "indoc", "language", "log", "lsp", "parking_lot", "pretty_assertions", - "semantic_version", + "proto", + "semver", "serde", "serde_json", "task", + "tempfile", "toml 0.8.23", "url", "util", @@ -5926,7 +5949,7 @@ dependencies = [ "release_channel", "remote", "reqwest_client", - "semantic_version", + "semver", "serde", 
"serde_json", "serde_json_lenient", @@ -5965,7 +5988,7 @@ dependencies = [ "picker", "project", "release_channel", - "semantic_version", + "semver", "serde", "settings", "smallvec", @@ -5985,40 +6008,17 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - -[[package]] -name = "fancy-regex" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" -dependencies = [ - "bit-set 0.5.3", - "regex-automata", - "regex-syntax", -] - [[package]] name = "fancy-regex" -version = "0.14.0" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298" +checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f" dependencies = [ - "bit-set 0.8.0", + "bit-set", "regex-automata", "regex-syntax", ] -[[package]] -name = "fast-float2" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" - [[package]] name = "fast-srgb8" version = "1.0.0" @@ -6129,7 +6129,7 @@ dependencies = [ "picker", "pretty_assertions", "project", - "schemars 1.0.4", + "schemars", "search", "serde", "serde_json", @@ -6177,9 +6177,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "fixedbitset" @@ -6194,7 +6194,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc5a4e564e38c699f2880d3fda590bedc2e69f3f84cd48b457bd892ce61d0aa9" dependencies = [ "crc32fast", - "libz-rs-sys", "miniz_oxide", ] @@ -6230,9 +6229,9 @@ checksum = "8bf7cc16383c4b8d58b9905a8509f02926ce3058053c056376248d958c9df1e8" [[package]] name = "fluent-uri" -version = "0.3.2" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" +checksum = "bc74ac4d8359ae70623506d512209619e5cf8f347124910440dbc221714b328e" dependencies = [ "borrow-or-share", "ref-cast", @@ -6248,7 +6247,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -6359,9 +6358,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" [[package]] name = "fork" -version = "0.2.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05dc8b302e04a1c27f4fe694439ef0f29779ca4edc205b7b58f00db04e29656d" +checksum = "30268f1eefccc9d72f43692e8b89e659aeb52e84016c3b32b6e7e9f1c8f38f94" dependencies = [ "libc", ] @@ -6411,6 +6410,7 @@ dependencies = [ "git", "gpui", "ignore", + "is_executable", "libc", "log", "notify 8.2.0", @@ -6450,16 +6450,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "fs4" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8640e34b88f7652208ce9e88b1a37a2ae95227d84abec377ccd3c5cfeb141ed4" -dependencies = [ - "rustix 1.1.2", - "windows-sys 0.59.0", -] - [[package]] name = "fs_benchmarks" version = "0.1.0" @@ -6921,7 +6911,21 @@ dependencies = [ ] [[package]] -name = "generic-array" +name = "generator" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2" +dependencies = [ + "cc", + "cfg-if", + "libc", + "log", + "rustversion", + "windows 0.61.3", +] + +[[package]] +name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" @@ -6970,13 +6974,13 @@ dependencies = [ [[package]] name = "gh-workflow" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435" +source = "git+https://github.com/zed-industries/gh-workflow?rev=09acfdf2bd5c1d6254abefd609c808ff73547b2c#09acfdf2bd5c1d6254abefd609c808ff73547b2c" dependencies = [ "async-trait", "derive_more 2.0.1", "derive_setters", "gh-workflow-macros", - "indexmap 2.11.4", + "indexmap", "merge", "serde", "serde_json", @@ -6987,7 +6991,7 @@ dependencies = [ [[package]] name = "gh-workflow-macros" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435" +source = "git+https://github.com/zed-industries/gh-workflow?rev=09acfdf2bd5c1d6254abefd609c808ff73547b2c#09acfdf2bd5c1d6254abefd609c808ff73547b2c" dependencies = [ "heck 0.5.0", "quote", @@ -7011,7 +7015,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" dependencies = [ "fallible-iterator", - "indexmap 2.11.4", + "indexmap", "stable_deref_trait", ] @@ -7041,7 +7045,7 @@ dependencies = [ "rand 0.9.2", "regex", "rope", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "smol", @@ -7081,6 +7085,7 @@ dependencies = [ "gpui", "http_client", "indoc", + "itertools 0.14.0", "pretty_assertions", "regex", "serde", @@ -7110,6 +7115,7 @@ dependencies = [ "futures 0.3.31", "fuzzy", "git", + "git_hosting_providers", "gpui", "indoc", "itertools 0.14.0", @@ -7125,17 +7131,21 @@ dependencies = [ "picker", "pretty_assertions", "project", + "prompt_store", + "rand 0.9.2", "recent_projects", "remote", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", + "smol", "strum 0.27.2", "telemetry", "theme", "time", "time_format", + "tracing", "ui", "unindent", "util", @@ -7145,6 +7155,7 @@ dependencies = [ "zed_actions", "zeroize", "zlog", + "ztracing", ] [[package]] @@ -7231,7 +7242,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -7287,6 +7298,7 @@ dependencies = [ "calloop", "calloop-wayland-source", "cbindgen", + "circular-buffer", "cocoa 0.26.0", "cocoa-foundation 0.2.0", "collections", @@ -7313,6 +7325,7 @@ dependencies = [ "libc", "log", "lyon", + "mach2 0.5.0", "media", "metal", "naga", @@ -7334,14 +7347,15 @@ dependencies = [ "refineable", "reqwest_client", "resvg", - "schemars 1.0.4", + "schemars", "seahash", - "semantic_version", + "semver", "serde", "serde_json", "slotmap", "smallvec", "smol", + "spin 0.10.0", "stacksafe", "strum 0.27.2", "sum_tree", @@ -7421,7 +7435,7 @@ dependencies = [ 
"futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.11.4", + "indexmap", "slab", "tokio", "tokio-util", @@ -7440,7 +7454,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.3.1", - "indexmap 2.11.4", + "indexmap", "slab", "tokio", "tokio-util", @@ -7519,10 +7533,20 @@ dependencies = [ "allocator-api2", "equivalent", "foldhash 0.1.5", - "rayon", "serde", ] +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", +] + [[package]] name = "hashlink" version = "0.8.4" @@ -7620,7 +7644,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c255bdf46e07fb840d120a36dcc81f385140d7191c76a7391672675c01a55d" dependencies = [ - "bincode 1.3.3", + "bincode", "byteorder", "heed-traits", "serde", @@ -7810,7 +7834,6 @@ dependencies = [ "tempfile", "url", "util", - "zed-reqwest", ] [[package]] @@ -8203,17 +8226,6 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - [[package]] name = "indexmap" version = "2.11.4" @@ -8221,7 +8233,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.1", "serde", "serde_core", ] @@ -8391,7 +8403,7 @@ version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb8251fb7bcd9ccd3725ed8deae9fe7db8e586495c9eb5b0c52e6233e5e75ea" dependencies = [ - "bincode 1.3.3", + "bincode", "crossbeam-channel", "fnv", "lazy_static", @@ -8450,6 +8462,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "is_executable" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baabb8b4867b26294d818bf3f651a454b6901431711abb96e296245888d6e8c4" +dependencies = [ + "windows-sys 0.60.2", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -8602,7 +8623,7 @@ dependencies = [ "language", "paths", "project", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -8614,21 +8635,21 @@ dependencies = [ [[package]] name = "jsonschema" -version = "0.30.0" +version = "0.37.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1b46a0365a611fbf1d2143104dcf910aada96fafd295bab16c60b802bf6fa1d" +checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e" dependencies = [ "ahash 0.8.12", - "base64 0.22.1", "bytecount", + "data-encoding", "email_address", - "fancy-regex 0.14.0", + "fancy-regex", "fraction", + "getrandom 0.3.4", "idna", "itoa", "num-cmp", "num-traits", - "once_cell", "percent-encoding", "referencing", "regex", @@ -8636,6 +8657,7 @@ dependencies = [ "reqwest 0.12.24", "serde", "serde_json", + "unicode-general-category", "uuid-simd", ] @@ -8656,23 +8678,25 @@ dependencies = [ [[package]] name = "jupyter-protocol" -version = "0.6.0" -source = 
"git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c047f6b5e551563af2ddb13dafed833f0ec5a5b0f9621d5ad740a9ff1e1095" dependencies = [ - "anyhow", "async-trait", "bytes 1.10.1", "chrono", "futures 0.3.31", "serde", "serde_json", + "thiserror 2.0.17", "uuid", ] [[package]] name = "jupyter-websocket-client" -version = "0.9.0" -source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4197fa926a6b0bddfed7377d9fed3d00a0dec44a1501e020097bd26604699cae" dependencies = [ "anyhow", "async-trait", @@ -8681,6 +8705,7 @@ dependencies = [ "jupyter-protocol", "serde", "serde_json", + "tokio", "url", "uuid", ] @@ -8718,7 +8743,6 @@ dependencies = [ "ui", "ui_input", "util", - "vim", "workspace", "zed_actions", ] @@ -8784,6 +8808,7 @@ dependencies = [ "ctor", "diffy", "ec4rs", + "encoding_rs", "fs", "futures 0.3.31", "fuzzy", @@ -8801,7 +8826,8 @@ dependencies = [ "rand 0.9.2", "regex", "rpc", - "schemars 1.0.4", + "schemars", + "semver", "serde", "serde_json", "settings", @@ -8864,23 +8890,25 @@ dependencies = [ "cloud_api_types", "cloud_llm_client", "collections", + "credentials_provider", "futures 0.3.31", "gpui", "http_client", "icons", "image", "log", + "open_ai", "open_router", "parking_lot", "proto", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", "smol", - "telemetry_events", "thiserror 2.0.17", "util", + "zed_env_vars", ] [[package]] @@ -8922,7 +8950,8 @@ dependencies = [ "partial-json-fixer", "project", "release_channel", - "schemars 1.0.4", + "schemars", + "semver", "serde", "serde_json", "settings", @@ -8936,7 +8965,6 @@ dependencies = [ "util", "vercel", "x_ai", - "zed_env_vars", ] [[package]] @@ -8988,6 +9016,7 @@ dependencies = [ "project", "proto", "release_channel", + "semver", "serde_json", "settings", "theme", @@ -9011,6 +9040,7 @@ dependencies = [ "chrono", "collections", "futures 0.3.31", + "globset", "gpui", "http_client", "itertools 0.14.0", @@ -9032,11 +9062,14 @@ dependencies = [ "regex", "rope", "rust-embed", + "semver", "serde", "serde_json", "serde_json_lenient", "settings", + "smallvec", "smol", + "snippet", "task", "terminal", "text", @@ -9072,7 +9105,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -9217,15 +9250,6 @@ dependencies = [ "webrtc-sys", ] -[[package]] -name = "libz-rs-sys" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "840db8cf39d9ec4dd794376f38acc40d0fc65eec2a8f484f7fd375b84602becd" -dependencies = [ - "zlib-rs", -] - [[package]] name = "libz-sys" version = "1.1.22" @@ -9288,12 +9312,6 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" -[[package]] -name = "litrs" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" - [[package]] name = "livekit" version = "0.7.8" @@ -9439,7 +9457,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", 
- "schemars 1.0.4", + "schemars", "serde", "serde_json", ] @@ -9463,6 +9481,19 @@ dependencies = [ "value-bag", ] +[[package]] +name = "loom" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber", +] + [[package]] name = "loop9" version = "0.1.5" @@ -9502,7 +9533,8 @@ dependencies = [ "parking_lot", "postage", "release_channel", - "schemars 1.0.4", + "schemars", + "semver", "serde", "serde_json", "smol", @@ -9584,25 +9616,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "lz4" -version = "1.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" -dependencies = [ - "lz4-sys", -] - -[[package]] -name = "lz4-sys" -version = "1.11.1+lz4-1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "mac" version = "0.1.1" @@ -10014,6 +10027,18 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +[[package]] +name = "miniprofiler_ui" +version = "0.1.0" +dependencies = [ + "gpui", + "serde_json", + "smol", + "util", + "workspace", + "zed_actions", +] + [[package]] name = "miniz_oxide" version = "0.8.9" @@ -10070,7 +10095,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "strum 0.27.2", @@ -10139,9 +10164,11 @@ dependencies = [ "sum_tree", "text", "theme", + "tracing", "tree-sitter", "util", "zlog", + "ztracing", ] [[package]] @@ -10163,14 +10190,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b977c445f26e49757f9aca3631c3b8b836942cb278d69a92e7b80d3b24da632" dependencies = [ "arrayvec", - "bit-set 0.8.0", + "bit-set", "bitflags 2.9.4", "cfg_aliases 0.2.1", "codespan-reporting 0.12.0", "half", "hashbrown 0.15.5", "hexf-parse", - "indexmap 2.11.4", + "indexmap", "log", "num-traits", "once_cell", @@ -10218,8 +10245,9 @@ dependencies = [ [[package]] name = "nbformat" -version = "0.10.0" -source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c7229d604d847227002715e1235cd84e81919285d904ccb290a42ecc409348" dependencies = [ "anyhow", "chrono", @@ -10452,15 +10480,6 @@ name = "notify-types" version = "2.0.0" source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419" -[[package]] -name = "now" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d89e9874397a1f0a52fc1f197a8effd9735223cb2390e9dcc83ac6cd02923d0" -dependencies = [ - "chrono", -] - [[package]] name = "ntapi" version = "0.4.1" @@ -10505,11 +10524,10 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" 
dependencies = [ - "byteorder", "lazy_static", "libm", "num-integer", @@ -10844,7 +10862,7 @@ checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "crc32fast", "hashbrown 0.15.5", - "indexmap 2.11.4", + "indexmap", "memchr", ] @@ -10857,41 +10875,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "object_store" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c1be0c6c22ec0817cdc77d3842f721a17fd30ab6965001415b5402a74e6b740" -dependencies = [ - "async-trait", - "base64 0.22.1", - "bytes 1.10.1", - "chrono", - "form_urlencoded", - "futures 0.3.31", - "http 1.3.1", - "http-body-util", - "humantime", - "hyper 1.7.0", - "itertools 0.14.0", - "parking_lot", - "percent-encoding", - "quick-xml 0.38.3", - "rand 0.9.2", - "reqwest 0.12.24", - "ring", - "serde", - "serde_json", - "serde_urlencoded", - "thiserror 2.0.17", - "tokio", - "tracing", - "url", - "walkdir", - "wasm-bindgen-futures", - "web-time", -] - [[package]] name = "ollama" version = "0.1.0" @@ -10899,7 +10882,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -10916,13 +10899,12 @@ dependencies = [ "documented", "fs", "fuzzy", - "git", "gpui", "menu", "notifications", "picker", "project", - "schemars 1.0.4", + "schemars", "serde", "settings", "telemetry", @@ -11007,11 +10989,12 @@ dependencies = [ "futures 0.3.31", "http_client", "log", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", "strum 0.27.2", + "thiserror 2.0.17", ] [[package]] @@ -11021,7 +11004,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -11525,7 +11508,7 @@ dependencies = [ [[package]] name = "pet" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "clap", "env_logger 0.10.2", @@ -11550,6 +11533,7 @@ dependencies = [ "pet-python-utils", "pet-reporter", "pet-telemetry", + "pet-uv", "pet-venv", "pet-virtualenv", "pet-virtualenvwrapper", @@ -11562,7 +11546,7 @@ dependencies = [ [[package]] name = "pet-conda" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11581,7 +11565,7 @@ dependencies = [ [[package]] name = "pet-core" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "clap", "lazy_static", @@ -11596,7 +11580,7 @@ dependencies = [ [[package]] name = "pet-env-var-path" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" 
+source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "lazy_static", "log", @@ -11612,7 +11596,7 @@ dependencies = [ [[package]] name = "pet-fs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11621,7 +11605,7 @@ dependencies = [ [[package]] name = "pet-global-virtualenvs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11634,7 +11618,7 @@ dependencies = [ [[package]] name = "pet-homebrew" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "lazy_static", "log", @@ -11652,7 +11636,7 @@ dependencies = [ [[package]] name = "pet-jsonrpc" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "env_logger 0.10.2", "log", @@ -11665,7 +11649,7 @@ dependencies = [ [[package]] name = "pet-linux-global-python" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11678,7 +11662,7 @@ dependencies = [ [[package]] name = "pet-mac-commandlinetools" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11691,7 +11675,7 @@ dependencies = [ [[package]] name = "pet-mac-python-org" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11704,7 +11688,7 @@ dependencies = [ [[package]] name = "pet-mac-xcode" version = "0.1.0" -source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11717,7 +11701,7 @@ dependencies = [ [[package]] name = "pet-pipenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11730,7 +11714,7 @@ dependencies = [ [[package]] name = "pet-pixi" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11742,7 +11726,7 @@ dependencies = [ [[package]] name = "pet-poetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "base64 0.22.1", "lazy_static", @@ -11763,7 +11747,7 @@ dependencies = [ [[package]] name = "pet-pyenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "lazy_static", "log", @@ -11781,7 +11765,7 @@ dependencies = [ [[package]] name = "pet-python-utils" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11798,7 +11782,7 @@ dependencies = [ [[package]] name = "pet-reporter" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "env_logger 0.10.2", "log", @@ -11812,7 +11796,7 @@ dependencies = [ [[package]] name = "pet-telemetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "env_logger 0.10.2", 
"lazy_static", @@ -11824,10 +11808,22 @@ dependencies = [ "regex", ] +[[package]] +name = "pet-uv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" +dependencies = [ + "log", + "pet-core", + "pet-python-utils", + "serde", + "toml 0.9.8", +] + [[package]] name = "pet-venv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11839,7 +11835,7 @@ dependencies = [ [[package]] name = "pet-virtualenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11851,7 +11847,7 @@ dependencies = [ [[package]] name = "pet-virtualenvwrapper" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "log", "msvc_spectre_libs", @@ -11864,7 +11860,7 @@ dependencies = [ [[package]] name = "pet-windows-registry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "lazy_static", "log", @@ -11882,7 +11878,7 @@ dependencies = [ [[package]] name = "pet-windows-store" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" dependencies = [ "lazy_static", "log", @@ -11902,7 +11898,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.11.4", + "indexmap", ] [[package]] @@ -12018,7 +12014,7 @@ dependencies = [ "env_logger 0.11.8", "gpui", "menu", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "theme", @@ -12118,16 +12114,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" -[[package]] -name = "planus" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3daf8e3d4b712abe1d690838f6e29fb76b76ea19589c4afa39ec30e12f62af71" -dependencies = [ - "array-init-cursor", - "hashbrown 0.15.5", -] - [[package]] name = "plist" version = "1.8.0" @@ -12135,7 +12121,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" dependencies = [ "base64 0.22.1", - "indexmap 2.11.4", + "indexmap", "quick-xml 0.38.3", "serde", "time", @@ -12196,559 +12182,54 @@ dependencies = [ ] [[package]] -name = "polars" -version = "0.51.0" +name = "polling" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5f7feb5d56b954e691dff22a8b2d78d77433dcc93c35fe21c3777fdc121b697" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ - "getrandom 0.2.16", - "getrandom 0.3.4", - "polars-arrow", - "polars-core", - "polars-error", - "polars-io", - "polars-lazy", - "polars-ops", - "polars-parquet", - "polars-sql", - "polars-time", - "polars-utils", - "version_check", + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] -name = "polars-arrow" -version = "0.51.0" +name = "pollster" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b4fed2343961b3eea3db2cee165540c3e1ad9d5782350cc55a9e76cf440148" -dependencies = [ - "atoi_simd", - "bitflags 2.9.4", - "bytemuck", - "chrono", - "chrono-tz", - "dyn-clone", - "either", - "ethnum", - "getrandom 0.2.16", - "getrandom 0.3.4", - "hashbrown 0.15.5", - "itoa", - "lz4", - "num-traits", - "polars-arrow-format", - "polars-error", - "polars-schema", - "polars-utils", - "serde", - "simdutf8", - "streaming-iterator", - "strum_macros 0.27.2", - "version_check", - "zstd 0.13.3", -] +checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" [[package]] -name = "polars-arrow-format" -version = "0.2.1" +name = "pori" +version = "0.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a556ac0ee744e61e167f34c1eb0013ce740e0ee6cd8c158b2ec0b518f10e6675" +checksum = "a4a63d338dec139f56dacc692ca63ad35a6be6a797442479b55acd611d79e906" dependencies = [ - "planus", - "serde", + "nom 7.1.3", ] [[package]] -name = "polars-compute" -version = "0.51.0" +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "portable-atomic-util" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138785beda4e4a90a025219f09d0d15a671b2be9091513ede58e05db6ad4413f" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ - "atoi_simd", - "bytemuck", - "chrono", - "either", - "fast-float2", - "hashbrown 0.15.5", - "itoa", - "num-traits", - "polars-arrow", - "polars-error", - "polars-utils", - "rand 0.9.2", - "ryu", - "serde", - "skiplist", - "strength_reduce", - "strum_macros 0.27.2", - "version_check", + "portable-atomic", ] [[package]] -name = "polars-core" -version = "0.51.0" +name = "portable-pty" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e77b1f08ef6dbb032bb1d0d3365464be950df9905f6827a95b24c4ca5518901d" -dependencies = [ - "bitflags 2.9.4", - "boxcar", - "bytemuck", - "chrono", - "chrono-tz", - "comfy-table", - "either", - "hashbrown 0.15.5", - "indexmap 2.11.4", - "itoa", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-dtype", - "polars-error", - "polars-row", - "polars-schema", - "polars-utils", - "rand 0.9.2", - "rand_distr", - "rayon", - "regex", - 
"serde", - "serde_json", - "strum_macros 0.27.2", - "uuid", - "version_check", - "xxhash-rust", -] - -[[package]] -name = "polars-dtype" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89c43d0ea57168be4546c4d8064479ed8b29a9c79c31a0c7c367ee734b9b7158" -dependencies = [ - "boxcar", - "hashbrown 0.15.5", - "polars-arrow", - "polars-error", - "polars-utils", - "serde", - "uuid", -] - -[[package]] -name = "polars-error" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9cb5d98f59f8b94673ee391840440ad9f0d2170afced95fc98aa86f895563c0" -dependencies = [ - "object_store", - "parking_lot", - "polars-arrow-format", - "regex", - "signal-hook", - "simdutf8", -] - -[[package]] -name = "polars-expr" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "343931b818cf136349135ba11dbc18c27683b52c3477b1ba8ca606cf5ab1965c" -dependencies = [ - "bitflags 2.9.4", - "hashbrown 0.15.5", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-io", - "polars-ops", - "polars-plan", - "polars-row", - "polars-time", - "polars-utils", - "rand 0.9.2", - "rayon", - "recursive", -] - -[[package]] -name = "polars-io" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10388c64b8155122488229a881d1c6f4fdc393bc988e764ab51b182fcb2307e4" -dependencies = [ - "async-trait", - "atoi_simd", - "blake3", - "bytes 1.10.1", - "chrono", - "fast-float2", - "fs4", - "futures 0.3.31", - "glob", - "hashbrown 0.15.5", - "home", - "itoa", - "memchr", - "memmap2", - "num-traits", - "object_store", - "percent-encoding", - "polars-arrow", - "polars-core", - "polars-error", - "polars-parquet", - "polars-schema", - "polars-time", - "polars-utils", - "rayon", - "regex", - "reqwest 0.12.24", - "ryu", - "serde", - "serde_json", - "simdutf8", - "tokio", - "tokio-util", - "url", -] - -[[package]] -name = "polars-lazy" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fb6e2c6c2fa4ea0c660df1c06cf56960c81e7c2683877995bae3d4e3d408147" -dependencies = [ - "bitflags 2.9.4", - "chrono", - "either", - "memchr", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-expr", - "polars-io", - "polars-mem-engine", - "polars-ops", - "polars-plan", - "polars-stream", - "polars-time", - "polars-utils", - "rayon", - "version_check", -] - -[[package]] -name = "polars-mem-engine" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a856e98e253587c28d8132a5e7e5a75cb2c44731ca090f1481d45f1d123771" -dependencies = [ - "futures 0.3.31", - "memmap2", - "polars-arrow", - "polars-core", - "polars-error", - "polars-expr", - "polars-io", - "polars-ops", - "polars-plan", - "polars-time", - "polars-utils", - "rayon", - "recursive", - "tokio", -] - -[[package]] -name = "polars-ops" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf6062173fdc9ba05775548beb66e76643a148d9aeadc9984ed712bc4babd76" -dependencies = [ - "argminmax", - "base64 0.22.1", - "bytemuck", - "chrono", - "chrono-tz", - "either", - "hashbrown 0.15.5", - "hex", - "indexmap 2.11.4", - "libm", - "memchr", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-error", - "polars-schema", - "polars-utils", - "rayon", - "regex", - "regex-syntax", - "strum_macros 0.27.2", - "unicode-normalization", - "unicode-reverse", - 
"version_check", -] - -[[package]] -name = "polars-parquet" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1d769180dec070df0dc4b89299b364bf2cfe32b218ecc4ddd8f1a49ae60669" -dependencies = [ - "async-stream", - "base64 0.22.1", - "brotli", - "bytemuck", - "ethnum", - "flate2", - "futures 0.3.31", - "hashbrown 0.15.5", - "lz4", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-error", - "polars-parquet-format", - "polars-utils", - "serde", - "simdutf8", - "snap", - "streaming-decompression", - "zstd 0.13.3", -] - -[[package]] -name = "polars-parquet-format" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c025243dcfe8dbc57e94d9f82eb3bef10b565ab180d5b99bed87fd8aea319ce1" -dependencies = [ - "async-trait", - "futures 0.3.31", -] - -[[package]] -name = "polars-plan" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd3a2e33ae4484fe407ab2d2ba5684f0889d1ccf3ad6b844103c03638e6d0a0" -dependencies = [ - "bitflags 2.9.4", - "bytemuck", - "bytes 1.10.1", - "chrono", - "chrono-tz", - "either", - "futures 0.3.31", - "hashbrown 0.15.5", - "memmap2", - "num-traits", - "percent-encoding", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-error", - "polars-io", - "polars-ops", - "polars-parquet", - "polars-time", - "polars-utils", - "rayon", - "recursive", - "regex", - "sha2", - "strum_macros 0.27.2", - "version_check", -] - -[[package]] -name = "polars-row" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18734f17e0e348724df3ae65f3ee744c681117c04b041cac969dfceb05edabc0" -dependencies = [ - "bitflags 2.9.4", - "bytemuck", - "polars-arrow", - "polars-compute", - "polars-dtype", - "polars-error", - "polars-utils", -] - -[[package]] -name = "polars-schema" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6c1ab13e04d5167661a9854ed1ea0482b2ed9b8a0f1118dabed7cd994a85e3" -dependencies = [ - "indexmap 2.11.4", - "polars-error", - "polars-utils", - "serde", - "version_check", -] - -[[package]] -name = "polars-sql" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4e7766da02cc1d464994404d3e88a7a0ccd4933df3627c325480fbd9bbc0a11" -dependencies = [ - "bitflags 2.9.4", - "hex", - "polars-core", - "polars-error", - "polars-lazy", - "polars-ops", - "polars-plan", - "polars-time", - "polars-utils", - "rand 0.9.2", - "regex", - "serde", - "sqlparser", -] - -[[package]] -name = "polars-stream" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f6c6ca1ea01f9dea424d167e4f33f5ec44cd67fbfac9efd40575ed20521f14" -dependencies = [ - "async-channel 2.5.0", - "async-trait", - "atomic-waker", - "bitflags 2.9.4", - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-queue", - "crossbeam-utils", - "futures 0.3.31", - "memmap2", - "parking_lot", - "percent-encoding", - "pin-project-lite", - "polars-arrow", - "polars-core", - "polars-error", - "polars-expr", - "polars-io", - "polars-mem-engine", - "polars-ops", - "polars-parquet", - "polars-plan", - "polars-utils", - "rand 0.9.2", - "rayon", - "recursive", - "slotmap", - "tokio", - "tokio-util", - "version_check", -] - -[[package]] -name = "polars-time" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f6a3a6e279a7a984a0b83715660f9e880590c6129ec2104396bfa710bcd76dee" -dependencies = [ - "atoi_simd", - "bytemuck", - "chrono", - "chrono-tz", - "now", - "num-traits", - "polars-arrow", - "polars-compute", - "polars-core", - "polars-error", - "polars-ops", - "polars-utils", - "rayon", - "regex", - "strum_macros 0.27.2", -] - -[[package]] -name = "polars-utils" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57b267021b0e5422d7fbc70fd79e51b9f9a8466c585779373a18b0199e973f29" -dependencies = [ - "bincode 2.0.1", - "bytemuck", - "bytes 1.10.1", - "compact_str", - "either", - "flate2", - "foldhash 0.1.5", - "hashbrown 0.15.5", - "indexmap 2.11.4", - "libc", - "memmap2", - "num-traits", - "polars-error", - "rand 0.9.2", - "raw-cpuid 11.6.0", - "rayon", - "regex", - "rmp-serde", - "serde", - "serde_json", - "serde_stacker", - "slotmap", - "stacker", - "uuid", - "version_check", -] - -[[package]] -name = "polling" -version = "3.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi", - "pin-project-lite", - "rustix 1.1.2", - "windows-sys 0.61.2", -] - -[[package]] -name = "pollster" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" - -[[package]] -name = "portable-atomic" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" - -[[package]] -name = "portable-atomic-util" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" -dependencies = [ - "portable-atomic", -] - -[[package]] -name = "portable-pty" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4a596a2b3d2752d94f51fac2d4a96737b8705dddd311a32b9af47211f08671e" +checksum = "b4a596a2b3d2752d94f51fac2d4a96737b8705dddd311a32b9af47211f08671e" dependencies = [ "anyhow", "bitflags 1.3.2", @@ -12995,8 +12476,10 @@ dependencies = [ "context_server", "dap", "dap_adapters", + "db", + "encoding_rs", "extension", - "fancy-regex 0.14.0", + "fancy-regex", "fs", "futures 0.3.31", "fuzzy", @@ -13007,7 +12490,7 @@ dependencies = [ "gpui", "http_client", "image", - "indexmap 2.11.4", + "indexmap", "itertools 0.14.0", "language", "log", @@ -13024,7 +12507,7 @@ dependencies = [ "release_channel", "remote", "rpc", - "schemars 1.0.4", + "schemars", "semver", "serde", "serde_json", @@ -13041,14 +12524,34 @@ dependencies = [ "terminal", "text", "toml 0.8.23", + "tracing", "unindent", "url", "util", "watch", + "wax", "which 6.0.3", "worktree", "zeroize", "zlog", + "ztracing", +] + +[[package]] +name = "project_benchmarks" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "client", + "futures 0.3.31", + "gpui", + "http_client", + "language", + "node_runtime", + "project", + "settings", + "watch", ] [[package]] @@ -13071,13 +12574,14 @@ dependencies = [ "pretty_assertions", "project", "rayon", - "schemars 1.0.4", + "schemars", "search", "serde", "serde_json", "settings", "smallvec", "telemetry", + "tempfile", "theme", "ui", "util", @@ -13101,6 +12605,7 @@ dependencies = [ "picker", "project", "release_channel", + "semver", "serde_json", "settings", "theme", @@ -13346,13 
+12851,12 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pulley-interpreter" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62d95f8575df49a2708398182f49a888cf9dc30210fb1fd2df87c889edcee75d" +checksum = "986beaef947a51d17b42b0ea18ceaa88450d35b6994737065ed505c39172db71" dependencies = [ "cranelift-bitset", "log", - "sptr", "wasmtime-math", ] @@ -13431,7 +12935,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ "memchr", - "serde", ] [[package]] @@ -13733,6 +13236,7 @@ dependencies = [ "askpass", "auto_update", "dap", + "db", "editor", "extension_host", "file_finder", @@ -13744,12 +13248,14 @@ dependencies = [ "log", "markdown", "menu", + "node_runtime", "ordered-float 2.10.1", "paths", "picker", "project", "release_channel", "remote", + "semver", "serde", "serde_json", "settings", @@ -13761,29 +13267,10 @@ dependencies = [ "util", "windows-registry 0.6.1", "workspace", + "worktree", "zed_actions", ] -[[package]] -name = "recursive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0786a43debb760f491b1bc0269fe5e84155353c67482b9e60d0cfb596054b43e" -dependencies = [ - "recursive-proc-macro-impl", - "stacker", -] - -[[package]] -name = "recursive-proc-macro-impl" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b" -dependencies = [ - "quote", - "syn 2.0.106", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -13846,13 +13333,14 @@ dependencies = [ [[package]] name = "referencing" -version = "0.30.0" +version = "0.37.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8eff4fa778b5c2a57e85c5f2fe3a709c52f0e60d23146e2151cbef5893f420e" +checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c" dependencies = [ "ahash 0.8.12", "fluent-uri", - "once_cell", + "getrandom 0.3.4", + "hashbrown 0.16.1", "parking_lot", "percent-encoding", "serde_json", @@ -13867,9 +13355,9 @@ dependencies = [ [[package]] name = "regalloc2" -version = "0.11.2" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc06e6b318142614e4a48bc725abbf08ff166694835c43c9dae5a9009704639a" +checksum = "5216b1837de2149f8bc8e6d5f88a9326b63b8c836ed58ce4a0a29ec736a59734" dependencies = [ "allocator-api2", "bumpalo", @@ -13920,6 +13408,7 @@ name = "release_channel" version = "0.1.0" dependencies = [ "gpui", + "semver", ] [[package]] @@ -13939,7 +13428,8 @@ dependencies = [ "prost 0.9.0", "release_channel", "rpc", - "schemars 1.0.4", + "schemars", + "semver", "serde", "serde_json", "settings", @@ -13996,6 +13486,7 @@ dependencies = [ "paths", "pretty_assertions", "project", + "prompt_store", "proto", "rayon", "release_channel", @@ -14003,6 +13494,7 @@ dependencies = [ "reqwest_client", "rpc", "rust-embed", + "semver", "serde", "serde_json", "settings", @@ -14010,6 +13502,7 @@ dependencies = [ "smol", "sysinfo 0.37.2", "task", + "theme", "thiserror 2.0.17", "toml 0.8.23", "unindent", @@ -14134,35 +13627,26 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.12", "http 1.3.1", "http-body 1.0.1", "http-body-util", "hyper 1.7.0", - "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", "percent-encoding", "pin-project-lite", 
- "quinn", - "rustls 0.23.33", - "rustls-native-certs 0.8.2", - "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", - "tokio-rustls 0.26.2", - "tokio-util", "tower 0.5.2", "tower-http 0.6.6", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-streams", "web-sys", ] @@ -14241,7 +13725,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -14285,17 +13769,6 @@ dependencies = [ "paste", ] -[[package]] -name = "rmp-serde" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" -dependencies = [ - "byteorder", - "rmp", - "serde", -] - [[package]] name = "rmpv" version = "1.3.0" @@ -14332,9 +13805,11 @@ dependencies = [ "rand 0.9.2", "rayon", "sum_tree", + "tracing", "unicode-segmentation", "util", "zlog", + "ztracing", ] [[package]] @@ -14365,14 +13840,14 @@ dependencies = [ "tracing", "util", "zlog", - "zstd 0.11.2+zstd.1.5.2", + "zstd", ] [[package]] name = "rsa" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" dependencies = [ "const-oid", "digest", @@ -14422,25 +13897,26 @@ dependencies = [ [[package]] name = "runtimelib" -version = "0.25.0" -source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "481b48894073a0096f28cbe9860af01fc1b861e55b3bc96afafc645ee3de62dc" dependencies = [ - "anyhow", "async-dispatcher", "async-std", + "aws-lc-rs", "base64 0.22.1", "bytes 1.10.1", "chrono", "data-encoding", - "dirs 5.0.1", + "dirs 6.0.0", "futures 0.3.31", "glob", "jupyter-protocol", - "ring", "serde", "serde_json", "shellexpand 3.1.1", "smol", + "thiserror 2.0.17", "uuid", "zeromq", ] @@ -14708,7 +14184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ "ring", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -14720,7 +14196,7 @@ dependencies = [ "aws-lc-rs", "ring", "rustls-pki-types", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -14826,24 +14302,13 @@ dependencies = [ "anyhow", "clap", "env_logger 0.11.8", - "schemars 1.0.4", + "schemars", "serde", "serde_json", + "settings", "theme", ] -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - [[package]] name = "schemars" version = "1.0.4" @@ -14851,7 +14316,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", - "indexmap 2.11.4", + "indexmap", "ref-cast", "schemars_derive", "serde", @@ -14950,7 +14415,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -15059,22 +14524,26 @@ dependencies = [ 
"editor", "futures 0.3.31", "gpui", + "itertools 0.14.0", "language", "lsp", "menu", + "pretty_assertions", "project", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", "smol", "theme", + "tracing", "ui", "unindent", "util", "util_macros", "workspace", "zed_actions", + "ztracing", ] [[package]] @@ -15133,14 +14602,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f7d95a54511e0c7be3f51e8867aa8cf35148d7b9445d44de2f943e2b206e749" -[[package]] -name = "semantic_version" -version = "0.1.0" -dependencies = [ - "anyhow", - "serde", -] - [[package]] name = "semver" version = "1.0.27" @@ -15213,7 +14674,7 @@ version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ - "indexmap 2.11.4", + "indexmap", "itoa", "memchr", "ryu", @@ -15227,7 +14688,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e033097bf0d2b59a62b42c18ebbb797503839b26afdda2c4e1415cb6c813540" dependencies = [ - "indexmap 2.11.4", + "indexmap", "itoa", "memchr", "ryu", @@ -15274,17 +14735,6 @@ dependencies = [ "serde_core", ] -[[package]] -name = "serde_stacker" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4936375d50c4be7eff22293a9344f8e46f323ed2b3c243e52f89138d9bb0f4a" -dependencies = [ - "serde", - "serde_core", - "stacker", -] - [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -15297,44 +14747,13 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_with" -version = "3.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6093cd8c01b25262b84927e0f7151692158fab02d961e04c979d3903eba7ecc5" -dependencies = [ - "base64 0.22.1", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.11.4", - "schemars 0.9.0", - "schemars 1.0.4", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7e6c180db0816026a61afa1cff5344fb7ebded7e4d3062772179f2501481c27" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.11.4", + "indexmap", "itoa", "ryu", "serde", @@ -15382,12 +14801,11 @@ dependencies = [ "pretty_assertions", "release_channel", "rust-embed", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", "serde_repr", - "serde_with", "settings_json", "settings_macros", "smallvec", @@ -15450,6 +14868,8 @@ dependencies = [ "assets", "bm25", "client", + "copilot", + "edit_prediction", "editor", "feature_flags", "fs", @@ -15458,6 +14878,7 @@ dependencies = [ "gpui", "heck 0.5.0", "language", + "language_models", "log", "menu", "node_runtime", @@ -15466,7 +14887,7 @@ dependencies = [ "pretty_assertions", "project", "release_channel", - "schemars 1.0.4", + "schemars", "search", "serde", "session", @@ -15658,16 +15079,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" -[[package]] -name = "skiplist" -version = "0.6.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354fd282d3177c2951004953e2fdc4cb342fa159bbee8b829852b6a081c8ea1" -dependencies = [ - "rand 0.9.2", - "thiserror 2.0.17", -] - [[package]] name = "skrifa" version = "0.37.0" @@ -15743,12 +15154,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - [[package]] name = "snippet" version = "0.1.0" @@ -15770,7 +15175,7 @@ dependencies = [ "indoc", "parking_lot", "paths", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -15795,26 +15200,6 @@ dependencies = [ "workspace", ] -[[package]] -name = "soa-rs" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75ae4668062b095fda87ba54118697bed601f07f6c68bf50289a25ca0c8c935" -dependencies = [ - "soa-rs-derive", -] - -[[package]] -name = "soa-rs-derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c09121507da587d3434e5929ce3321162f36bd3eff403873cb163c06b176913" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "socket2" version = "0.5.10" @@ -15853,6 +15238,15 @@ dependencies = [ "lock_api", ] +[[package]] +name = "spin" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591" +dependencies = [ + "lock_api", +] + [[package]] name = "spirv" version = "0.3.0+sdk-1.3.268.0" @@ -15925,15 +15319,6 @@ dependencies = [ "unicode_categories", ] -[[package]] -name = "sqlparser" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05a528114c392209b3264855ad491fcce534b94a38771b0a0b97a79379275ce8" -dependencies = [ - "log", -] - [[package]] name = "sqlx" version = "0.8.6" @@ -15967,7 +15352,7 @@ dependencies = [ "futures-util", "hashbrown 0.15.5", "hashlink 0.10.0", - "indexmap 2.11.4", + "indexmap", "log", "memchr", "once_cell", @@ -16235,15 +15620,6 @@ dependencies = [ "ui", ] -[[package]] -name = "streaming-decompression" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf6cc3b19bfb128a8ad11026086e31d3ce9ad23f8ea37354b31383a187c44cf3" -dependencies = [ - "fallible-streaming-iterator", -] - [[package]] name = "streaming-iterator" version = "0.1.9" @@ -16375,7 +15751,9 @@ dependencies = [ "log", "rand 0.9.2", "rayon", + "tracing", "zlog", + "ztracing", ] [[package]] @@ -16385,7 +15763,7 @@ dependencies = [ "anyhow", "client", "collections", - "edit_prediction", + "edit_prediction_types", "editor", "env_logger 0.11.8", "futures 0.3.31", @@ -16509,10 +15887,10 @@ checksum = "0193cc4331cfd2f3d2011ef287590868599a2f33c3e69bc22c1a3d3acf9e02fb" name = "svg_preview" version = "0.1.0" dependencies = [ - "editor", "file_icons", "gpui", "language", + "multi_buffer", "ui", "workspace", ] @@ -16888,6 +16266,7 @@ dependencies = [ "human_bytes", "pciid-parser", "release_channel", + "semver", "serde", "sysinfo 0.37.2", ] @@ -16906,7 +16285,7 @@ dependencies = [ "menu", "picker", "project", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "settings", @@ -16985,7 +16364,7 @@ dependencies = [ "parking_lot", "pretty_assertions", "proto", - 
"schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -17035,7 +16414,7 @@ dependencies = [ name = "telemetry_events" version = "0.1.0" dependencies = [ - "semantic_version", + "semver", "serde", "serde_json", ] @@ -17088,8 +16467,9 @@ dependencies = [ "rand 0.9.2", "regex", "release_channel", - "schemars 1.0.4", + "schemars", "serde", + "serde_json", "settings", "smol", "sysinfo 0.37.2", @@ -17099,6 +16479,7 @@ dependencies = [ "url", "urlencoding", "util", + "util_macros", "windows 0.61.3", ] @@ -17134,13 +16515,12 @@ dependencies = [ "project", "rand 0.9.2", "regex", - "schemars 1.0.4", + "schemars", "search", "serde", "serde_json", "settings", "shellexpand 2.1.2", - "smol", "task", "terminal", "theme", @@ -17186,7 +16566,7 @@ dependencies = [ "palette", "parking_lot", "refineable", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -17216,7 +16596,7 @@ dependencies = [ "clap", "collections", "gpui", - "indexmap 2.11.4", + "indexmap", "log", "palette", "serde", @@ -17312,13 +16692,13 @@ dependencies = [ [[package]] name = "tiktoken-rs" -version = "0.8.0" -source = "git+https://github.com/zed-industries/tiktoken-rs?rev=30c32a4522751699adeda0d5840c71c3b75ae73d#30c32a4522751699adeda0d5840c71c3b75ae73d" +version = "0.9.1" +source = "git+https://github.com/zed-industries/tiktoken-rs?rev=2570c4387a8505fb8f1d3f3557454b474f1e8271#2570c4387a8505fb8f1d3f3557454b474f1e8271" dependencies = [ "anyhow", "base64 0.22.1", "bstr", - "fancy-regex 0.13.0", + "fancy-regex", "lazy_static", "regex", "rustc-hash 1.1.0", @@ -17470,7 +16850,7 @@ dependencies = [ "project", "remote", "rpc", - "schemars 1.0.4", + "schemars", "serde", "settings", "smallvec", @@ -17627,7 +17007,6 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", - "futures-util", "pin-project-lite", "tokio", ] @@ -17659,7 +17038,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ - "indexmap 2.11.4", + "indexmap", "serde_core", "serde_spanned 1.0.3", "toml_datetime 0.7.3", @@ -17692,7 +17071,7 @@ version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.11.4", + "indexmap", "serde", "serde_spanned 0.6.9", "toml_datetime 0.6.11", @@ -17706,7 +17085,7 @@ version = "0.23.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" dependencies = [ - "indexmap 2.11.4", + "indexmap", "toml_datetime 0.7.3", "toml_parser", "winnow", @@ -17859,9 +17238,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647" dependencies = [ "log", "pin-project-lite", @@ -17871,9 +17250,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", 
"quote", @@ -17882,9 +17261,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c" dependencies = [ "once_cell", "valuable", @@ -17913,9 +17292,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.20" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ "matchers", "nu-ansi-term", @@ -17932,6 +17311,38 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "tracing-tracy" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eaa1852afa96e0fe9e44caa53dc0bd2d9d05e0f2611ce09f97f8677af56e4ba" +dependencies = [ + "tracing-core", + "tracing-subscriber", + "tracy-client", +] + +[[package]] +name = "tracy-client" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d722a05fe49b31fef971c4732a7d4aa6a18283d9ba46abddab35f484872947" +dependencies = [ + "loom", + "once_cell", + "tracy-client-sys", +] + +[[package]] +name = "tracy-client-sys" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" +dependencies = [ + "cc", + "windows-targets 0.52.6", +] + [[package]] name = "trait-variant" version = "0.1.2" @@ -17955,9 +17366,9 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.25.10" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87" +checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e" dependencies = [ "cc", "regex", @@ -17970,9 +17381,9 @@ dependencies = [ [[package]] name = "tree-sitter-bash" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6" +checksum = "9e5ec769279cc91b561d3df0d8a5deb26b0ad40d183127f409494d6d8fc53062" dependencies = [ "cc", "tree-sitter-language", @@ -18366,7 +17777,7 @@ dependencies = [ "icons", "itertools 0.14.0", "menu", - "schemars 1.0.4", + "schemars", "serde", "settings", "smallvec", @@ -18450,6 +17861,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce61d488bcdc9bc8b5d1772c404828b17fc481c0a582b5581e95fb233aef503e" +[[package]] +name = "unicode-general-category" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b993bddc193ae5bd0d623b49ec06ac3e9312875fdae725a975c51db1cc1677f" + [[package]] name = "unicode-ident" version = "1.0.19" @@ -18477,15 +17894,6 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" -[[package]] -name = "unicode-reverse" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6f4888ebc23094adfb574fdca9fdc891826287a6397d2cd28802ffd6f20c76" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "unicode-script" version = 
"0.5.7" @@ -18536,15 +17944,15 @@ checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "untrusted" -version = "0.9.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] -name = "unty" -version = "0.0.4" +name = "untrusted" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" @@ -18634,7 +18042,7 @@ dependencies = [ "rand 0.9.2", "regex", "rust-embed", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "serde_json_lenient", @@ -18678,7 +18086,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8" dependencies = [ "outref", - "uuid", "vsimd", ] @@ -18746,7 +18153,7 @@ name = "vercel" version = "0.1.0" dependencies = [ "anyhow", - "schemars 1.0.4", + "schemars", "serde", "strum 0.27.2", ] @@ -18786,9 +18193,11 @@ dependencies = [ "language", "log", "lsp", + "markdown_preview", "menu", "multi_buffer", "nvim-rs", + "outline_panel", "parking_lot", "perf", "picker", @@ -18796,8 +18205,9 @@ dependencies = [ "project_panel", "regex", "release_channel", - "schemars 1.0.4", + "schemars", "search", + "semver", "serde", "serde_json", "settings", @@ -18821,12 +18231,6 @@ dependencies = [ "settings", ] -[[package]] -name = "virtue" -version = "0.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" - [[package]] name = "vscode_theme" version = "0.2.0" @@ -19051,6 +18455,16 @@ dependencies = [ "wasmparser 0.227.1", ] +[[package]] +name = "wasm-encoder" +version = "0.229.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38ba1d491ecacb085a2552025c10a675a6fddcbd03b1fc9b36c536010ce265d2" +dependencies = [ + "leb128fmt", + "wasmparser 0.229.0", +] + [[package]] name = "wasm-metadata" version = "0.201.0" @@ -19058,7 +18472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fd83062c17b9f4985d438603cde0a5e8c5c8198201a6937f778b607924c7da2" dependencies = [ "anyhow", - "indexmap 2.11.4", + "indexmap", "serde", "serde_derive", "serde_json", @@ -19076,7 +18490,7 @@ dependencies = [ "anyhow", "auditable-serde", "flate2", - "indexmap 2.11.4", + "indexmap", "serde", "serde_derive", "serde_json", @@ -19106,7 +18520,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84e5df6dba6c0d7fafc63a450f1738451ed7a0b52295d83e868218fa286bf708" dependencies = [ "bitflags 2.9.4", - "indexmap 2.11.4", + "indexmap", "semver", ] @@ -19118,7 +18532,7 @@ checksum = "d06bfa36ab3ac2be0dee563380147a5b81ba10dd8885d7fbbc9eb574be67d185" dependencies = [ "bitflags 2.9.4", "hashbrown 0.15.5", - "indexmap 2.11.4", + "indexmap", "semver", "serde", ] @@ -19131,27 +18545,41 @@ checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" dependencies = [ "bitflags 2.9.4", "hashbrown 0.15.5", - "indexmap 2.11.4", + "indexmap", + "semver", +] + +[[package]] +name = "wasmparser" +version = "0.229.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0cc3b1f053f5d41aa55640a1fa9b6d1b8a9e4418d118ce308d20e24ff3575a8c" +dependencies = [ + "bitflags 2.9.4", + "hashbrown 0.15.5", + "indexmap", "semver", + "serde", ] [[package]] name = "wasmprinter" -version = "0.221.3" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7343c42a97f2926c7819ff81b64012092ae954c5d83ddd30c9fcdefd97d0b283" +checksum = "d25dac01892684a99b8fbfaf670eb6b56edea8a096438c75392daeb83156ae2e" dependencies = [ "anyhow", "termcolor", - "wasmparser 0.221.3", + "wasmparser 0.229.0", ] [[package]] name = "wasmtime" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11976a250672556d1c4c04c6d5d7656ac9192ac9edc42a4587d6c21460010e69" +checksum = "57373e1d8699662fb791270ac5dfac9da5c14f618ecf940cdb29dc3ad9472a3c" dependencies = [ + "addr2line 0.24.2", "anyhow", "async-trait", "bitflags 2.9.4", @@ -19159,20 +18587,19 @@ dependencies = [ "cc", "cfg-if", "encoding_rs", - "hashbrown 0.14.5", - "indexmap 2.11.4", + "hashbrown 0.15.5", + "indexmap", "libc", "log", "mach2 0.4.3", "memfd", "object 0.36.7", "once_cell", - "paste", "postcard", "psm", "pulley-interpreter", "rayon", - "rustix 0.38.44", + "rustix 1.1.2", "semver", "serde", "serde_derive", @@ -19180,7 +18607,7 @@ dependencies = [ "sptr", "target-lexicon 0.13.3", "trait-variant", - "wasmparser 0.221.3", + "wasmparser 0.229.0", "wasmtime-asm-macros", "wasmtime-component-macro", "wasmtime-component-util", @@ -19197,18 +18624,18 @@ dependencies = [ [[package]] name = "wasmtime-asm-macros" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f178b0d125201fbe9f75beaf849bd3e511891f9e45ba216a5b620802ccf64f2" +checksum = "bd0fc91372865167a695dc98d0d6771799a388a7541d3f34e939d0539d6583de" dependencies = [ "cfg-if", ] [[package]] name = "wasmtime-c-api-impl" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea30cef3608f2de5797c7bbb94c1ba4f3676d9a7f81ae86ced1b512e2766ed0c" +checksum = "46db556f1dccdd88e0672bd407162ab0036b72e5eccb0f4398d8251cba32dba1" dependencies = [ "anyhow", "log", @@ -19219,9 +18646,9 @@ dependencies = [ [[package]] name = "wasmtime-c-api-macros" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022a79ebe1124d5d384d82463d7e61c6b4dd857d81f15cb8078974eeb86db65b" +checksum = "315cc6bc8cdc66f296accb26d7625ae64c1c7b6da6f189e8a72ce6594bf7bd36" dependencies = [ "proc-macro2", "quote", @@ -19229,9 +18656,9 @@ dependencies = [ [[package]] name = "wasmtime-component-macro" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d74de6592ed945d0a602f71243982a304d5d02f1e501b638addf57f42d57dfaf" +checksum = "25c9c7526675ff9a9794b115023c4af5128e3eb21389bfc3dc1fd344d549258f" dependencies = [ "anyhow", "proc-macro2", @@ -19239,20 +18666,20 @@ dependencies = [ "syn 2.0.106", "wasmtime-component-util", "wasmtime-wit-bindgen", - "wit-parser 0.221.3", + "wit-parser 0.229.0", ] [[package]] name = "wasmtime-component-util" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707dc7b3c112ab5a366b30cfe2fb5b2f8e6a0f682f16df96a5ec582bfe6f056e" +checksum = "cc42ec8b078875804908d797cb4950fec781d9add9684c9026487fd8eb3f6291" [[package]] name = "wasmtime-cranelift" -version = "29.0.1" +version = "33.0.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "366be722674d4bf153290fbcbc4d7d16895cc82fb3e869f8d550ff768f9e9e87" +checksum = "b2bd72f0a6a0ffcc6a184ec86ac35c174e48ea0e97bbae277c8f15f8bf77a566" dependencies = [ "anyhow", "cfg-if", @@ -19262,29 +18689,30 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "gimli 0.31.1", - "itertools 0.12.1", + "itertools 0.14.0", "log", "object 0.36.7", + "pulley-interpreter", "smallvec", "target-lexicon 0.13.3", - "thiserror 1.0.69", - "wasmparser 0.221.3", + "thiserror 2.0.17", + "wasmparser 0.229.0", "wasmtime-environ", "wasmtime-versioned-export-macros", ] [[package]] name = "wasmtime-environ" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdadc1af7097347aa276a4f008929810f726b5b46946971c660b6d421e9994ad" +checksum = "e6187bb108a23eb25d2a92aa65d6c89fb5ed53433a319038a2558567f3011ff2" dependencies = [ "anyhow", "cpp_demangle", "cranelift-bitset", "cranelift-entity", "gimli 0.31.1", - "indexmap 2.11.4", + "indexmap", "log", "object 0.36.7", "postcard", @@ -19294,22 +18722,22 @@ dependencies = [ "serde_derive", "smallvec", "target-lexicon 0.13.3", - "wasm-encoder 0.221.3", - "wasmparser 0.221.3", + "wasm-encoder 0.229.0", + "wasmparser 0.229.0", "wasmprinter", "wasmtime-component-util", ] [[package]] name = "wasmtime-fiber" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccba90d4119f081bca91190485650730a617be1fff5228f8c4757ce133d21117" +checksum = "dc8965d2128c012329f390e24b8b2758dd93d01bf67e1a1a0dd3d8fd72f56873" dependencies = [ "anyhow", "cc", "cfg-if", - "rustix 0.38.44", + "rustix 1.1.2", "wasmtime-asm-macros", "wasmtime-versioned-export-macros", "windows-sys 0.59.0", @@ -19317,9 +18745,9 @@ dependencies = [ [[package]] name = "wasmtime-jit-icache-coherence" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec5e8552e01692e6c2e5293171704fed8abdec79d1a6995a0870ab190e5747d1" +checksum = "7af0e940cb062a45c0b3f01a926f77da5947149e99beb4e3dd9846d5b8f11619" dependencies = [ "anyhow", "cfg-if", @@ -19329,24 +18757,24 @@ dependencies = [ [[package]] name = "wasmtime-math" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29210ec2aa25e00f4d54605cedaf080f39ec01a872c5bd520ad04c67af1dde17" +checksum = "acfca360e719dda9a27e26944f2754ff2fd5bad88e21919c42c5a5f38ddd93cb" dependencies = [ "libm", ] [[package]] name = "wasmtime-slab" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb5821a96fa04ac14bc7b158bb3d5cd7729a053db5a74dad396cd513a5e5ccf" +checksum = "48e240559cada55c4b24af979d5f6c95e0029f5772f32027ec3c62b258aaff65" [[package]] name = "wasmtime-versioned-export-macros" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ff86db216dc0240462de40c8290887a613dddf9685508eb39479037ba97b5b" +checksum = "d0963c1438357a3d8c0efe152b4ef5259846c1cf8b864340270744fe5b3bae5e" dependencies = [ "proc-macro2", "quote", @@ -19355,9 +18783,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d1be69bfcab1bdac74daa7a1f9695ab992b9c8e21b9b061e7d66434097e0ca4" +checksum = 
"4ae951b72c7c6749a1c15dcdfb6d940a2614c932b4a54f474636e78e2c744b4c" dependencies = [ "anyhow", "async-trait", @@ -19372,30 +18800,43 @@ dependencies = [ "futures 0.3.31", "io-extras", "io-lifetimes", - "rustix 0.38.44", + "rustix 1.1.2", "system-interface", - "thiserror 1.0.69", + "thiserror 2.0.17", "tokio", "tracing", - "trait-variant", "url", "wasmtime", + "wasmtime-wasi-io", "wiggle", "windows-sys 0.59.0", ] +[[package]] +name = "wasmtime-wasi-io" +version = "33.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a835790dcecc3d7051ec67da52ba9e04af25e1bc204275b9391e3f0042b10797" +dependencies = [ + "anyhow", + "async-trait", + "bytes 1.10.1", + "futures 0.3.31", + "wasmtime", +] + [[package]] name = "wasmtime-winch" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdbabfb8f20502d5e1d81092b9ead3682ae59988487aafcd7567387b7a43cf8f" +checksum = "cbc3b117d03d6eeabfa005a880c5c22c06503bb8820f3aa2e30f0e8d87b6752f" dependencies = [ "anyhow", "cranelift-codegen", "gimli 0.31.1", "object 0.36.7", "target-lexicon 0.13.3", - "wasmparser 0.221.3", + "wasmparser 0.229.0", "wasmtime-cranelift", "wasmtime-environ", "winch-codegen", @@ -19403,14 +18844,14 @@ dependencies = [ [[package]] name = "wasmtime-wit-bindgen" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8358319c2dd1e4db79e3c1c5d3a5af84956615343f9f89f4e4996a36816e06e6" +checksum = "1382f4f09390eab0d75d4994d0c3b0f6279f86a571807ec67a8253c87cf6a145" dependencies = [ "anyhow", "heck 0.5.0", - "indexmap 2.11.4", - "wit-parser 0.221.3", + "indexmap", + "wit-parser 0.229.0", ] [[package]] @@ -19434,6 +18875,21 @@ dependencies = [ "zlog", ] +[[package]] +name = "wax" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d12a78aa0bab22d2f26ed1a96df7ab58e8a93506a3e20adb47c51a93b4e1357" +dependencies = [ + "const_format", + "itertools 0.11.0", + "nom 7.1.3", + "pori", + "regex", + "thiserror 1.0.69", + "walkdir", +] + [[package]] name = "wayland-backend" version = "0.3.11" @@ -19677,6 +19133,20 @@ dependencies = [ "winsafe", ] +[[package]] +name = "which_key" +version = "0.1.0" +dependencies = [ + "command_palette", + "gpui", + "serde", + "settings", + "theme", + "ui", + "util", + "workspace", +] + [[package]] name = "whoami" version = "1.6.1" @@ -19689,14 +19159,14 @@ dependencies = [ [[package]] name = "wiggle" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9af35bc9629c52c261465320a9a07959164928b4241980ba1cf923b9e6751d" +checksum = "649c1aca13ef9e9dccf2d5efbbebf12025bc5521c3fb7754355ef60f5eb810be" dependencies = [ "anyhow", "async-trait", "bitflags 2.9.4", - "thiserror 1.0.69", + "thiserror 2.0.17", "tracing", "wasmtime", "wiggle-macro", @@ -19704,24 +19174,23 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cf267dd05673912c8138f4b54acabe6bd53407d9d1536f0fadb6520dd16e101" +checksum = "164870fc34214ee42bd81b8ce9e7c179800fa1a7d4046d17a84e7f7bf422c8ad" dependencies = [ "anyhow", "heck 0.5.0", "proc-macro2", "quote", - "shellexpand 2.1.2", "syn 2.0.106", "witx", ] [[package]] name = "wiggle-macro" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"08c5c473d4198e6c2d377f3809f713ff0c110cab88a0805ae099a82119ee250c" +checksum = "d873bb5b59ca703b5e41562e96a4796d1af61bf4cf80bf8a7abda755a380ec1c" dependencies = [ "proc-macro2", "quote", @@ -19762,18 +19231,19 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "29.0.1" +version = "33.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f849ef2c5f46cb0a20af4b4487aaa239846e52e2c03f13fa3c784684552859c" +checksum = "7914c296fbcef59d1b89a15e82384d34dc9669bc09763f2ef068a28dd3a64ebf" dependencies = [ "anyhow", + "cranelift-assembler-x64", "cranelift-codegen", "gimli 0.31.1", "regalloc2", "smallvec", "target-lexicon 0.13.3", - "thiserror 1.0.69", - "wasmparser 0.221.3", + "thiserror 2.0.17", + "wasmparser 0.229.0", "wasmtime-cranelift", "wasmtime-environ", ] @@ -20562,7 +20032,7 @@ checksum = "d8a39a15d1ae2077688213611209849cad40e9e5cccf6e61951a425850677ff3" dependencies = [ "anyhow", "heck 0.4.1", - "indexmap 2.11.4", + "indexmap", "wasm-metadata 0.201.0", "wit-bindgen-core 0.22.0", "wit-component 0.201.0", @@ -20576,7 +20046,7 @@ checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" dependencies = [ "anyhow", "heck 0.5.0", - "indexmap 2.11.4", + "indexmap", "prettyplease", "syn 2.0.106", "wasm-metadata 0.227.1", @@ -20621,7 +20091,7 @@ checksum = "421c0c848a0660a8c22e2fd217929a0191f14476b68962afd2af89fd22e39825" dependencies = [ "anyhow", "bitflags 2.9.4", - "indexmap 2.11.4", + "indexmap", "log", "serde", "serde_derive", @@ -20640,7 +20110,7 @@ checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" dependencies = [ "anyhow", "bitflags 2.9.4", - "indexmap 2.11.4", + "indexmap", "log", "serde", "serde_derive", @@ -20659,7 +20129,7 @@ checksum = "196d3ecfc4b759a8573bf86a9b3f8996b304b3732e4c7de81655f875f6efdca6" dependencies = [ "anyhow", "id-arena", - "indexmap 2.11.4", + "indexmap", "log", "semver", "serde", @@ -20671,38 +20141,38 @@ dependencies = [ [[package]] name = "wit-parser" -version = "0.221.3" +version = "0.227.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "896112579ed56b4a538b07a3d16e562d101ff6265c46b515ce0c701eef16b2ac" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" dependencies = [ "anyhow", "id-arena", - "indexmap 2.11.4", + "indexmap", "log", "semver", "serde", "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.221.3", + "wasmparser 0.227.1", ] [[package]] name = "wit-parser" -version = "0.227.1" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +checksum = "459c6ba62bf511d6b5f2a845a2a736822e38059c1cfa0b644b467bbbfae4efa6" dependencies = [ "anyhow", "id-arena", - "indexmap 2.11.4", + "indexmap", "log", "semver", "serde", "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.227.1", + "wasmparser 0.229.0", ] [[package]] @@ -20731,13 +20201,16 @@ dependencies = [ "component", "dap", "db", + "feature_flags", "fs", "futures 0.3.31", + "git", "gpui", "http_client", "itertools 0.14.0", "language", "log", + "markdown", "menu", "node_runtime", "parking_lot", @@ -20745,7 +20218,7 @@ dependencies = [ "pretty_assertions", "project", "remote", - "schemars 1.0.4", + "schemars", "serde", "serde_json", "session", @@ -20771,8 +20244,10 @@ version = "0.1.0" dependencies = [ "anyhow", "async-lock 2.8.0", + "chardetng", "clock", "collections", + 
"encoding_rs", "fs", "futures 0.3.31", "fuzzy", @@ -20860,7 +20335,7 @@ name = "x_ai" version = "0.1.0" dependencies = [ "anyhow", - "schemars 1.0.4", + "schemars", "serde", "strum 0.27.2", ] @@ -20959,19 +20434,14 @@ dependencies = [ "cargo_toml", "clap", "gh-workflow", - "indexmap 2.11.4", + "indexmap", "indoc", "serde", + "serde_json", "toml 0.8.23", "toml_edit 0.22.27", ] -[[package]] -name = "xxhash-rust" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" - [[package]] name = "yaml-rust2" version = "0.8.1" @@ -21146,12 +20616,13 @@ dependencies = [ [[package]] name = "zed" -version = "0.213.0" +version = "0.219.0" dependencies = [ "acp_tools", "activity_indicator", "agent_settings", "agent_ui", + "agent_ui_v2", "anyhow", "ashpd 0.11.0", "askpass", @@ -21159,11 +20630,11 @@ dependencies = [ "audio", "auto_update", "auto_update_ui", - "backtrace", - "bincode 1.3.3", + "bincode", "breadcrumbs", "call", "channel", + "chrono", "clap", "cli", "client", @@ -21181,7 +20652,8 @@ dependencies = [ "debugger_tools", "debugger_ui", "diagnostics", - "edit_prediction_button", + "edit_prediction", + "edit_prediction_ui", "editor", "env_logger 0.11.8", "extension", @@ -21221,8 +20693,8 @@ dependencies = [ "menu", "migrator", "mimalloc", + "miniprofiler_ui", "nc", - "nix 0.29.0", "node_runtime", "notifications", "onboarding", @@ -21246,6 +20718,7 @@ dependencies = [ "reqwest_client", "rope", "search", + "semver", "serde", "serde_json", "session", @@ -21264,7 +20737,6 @@ dependencies = [ "task", "tasks_ui", "telemetry", - "telemetry_events", "terminal_view", "theme", "theme_extension", @@ -21272,6 +20744,7 @@ dependencies = [ "time", "title_bar", "toolchain_selector", + "tracing", "tree-sitter-md", "tree-sitter-rust", "ui", @@ -21286,17 +20759,16 @@ dependencies = [ "watch", "web_search", "web_search_providers", + "which_key", "windows 0.61.3", "winresource", "workspace", "zed-reqwest", "zed_actions", "zed_env_vars", - "zeta", - "zeta2", - "zeta2_tools", "zlog", "zlog_settings", + "ztracing", ] [[package]] @@ -21411,7 +20883,7 @@ name = "zed_actions" version = "0.1.0" dependencies = [ "gpui", - "schemars 1.0.4", + "schemars", "serde", "uuid", ] @@ -21437,6 +20909,8 @@ dependencies = [ [[package]] name = "zed_extension_api" version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0729d50b4ca0a7e28e590bbe32e3ca0194d97ef654961451a424c661a366fca0" dependencies = [ "serde", "serde_json", @@ -21445,9 +20919,7 @@ dependencies = [ [[package]] name = "zed_extension_api" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0729d50b4ca0a7e28e590bbe32e3ca0194d97ef654961451a424c661a366fca0" +version = "0.8.0" dependencies = [ "serde", "serde_json", @@ -21463,23 +20935,23 @@ dependencies = [ [[package]] name = "zed_html" -version = "0.2.3" +version = "0.3.0" dependencies = [ - "zed_extension_api 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.7.0", ] [[package]] name = "zed_proto" -version = "0.2.2" +version = "0.3.0" dependencies = [ - "zed_extension_api 0.1.0", + "zed_extension_api 0.7.0", ] [[package]] name = "zed_test_extension" version = "0.1.0" dependencies = [ - "zed_extension_api 0.7.0", + "zed_extension_api 0.8.0", ] [[package]] @@ -21607,184 +21079,10 @@ dependencies = [ ] [[package]] -name = "zeta" -version = "0.1.0" -dependencies = [ - "ai_onboarding", - "anyhow", - 
"arrayvec", - "call", - "client", - "clock", - "cloud_api_types", - "cloud_llm_client", - "collections", - "command_palette_hooks", - "copilot", - "ctor", - "db", - "edit_prediction", - "editor", - "feature_flags", - "fs", - "futures 0.3.31", - "gpui", - "http_client", - "indoc", - "itertools 0.14.0", - "language", - "language_model", - "log", - "menu", - "parking_lot", - "postage", - "project", - "rand 0.9.2", - "regex", - "release_channel", - "reqwest_client", - "rpc", - "serde", - "serde_json", - "settings", - "strum 0.27.2", - "telemetry", - "telemetry_events", - "theme", - "thiserror 2.0.17", - "tree-sitter-go", - "tree-sitter-rust", - "ui", - "util", - "uuid", - "workspace", - "worktree", - "zed_actions", - "zlog", -] - -[[package]] -name = "zeta2" -version = "0.1.0" -dependencies = [ - "anyhow", - "arrayvec", - "chrono", - "client", - "clock", - "cloud_llm_client", - "cloud_zeta2_prompt", - "collections", - "edit_prediction", - "edit_prediction_context", - "feature_flags", - "futures 0.3.31", - "gpui", - "indoc", - "language", - "language_model", - "log", - "lsp", - "open_ai", - "pretty_assertions", - "project", - "release_channel", - "serde", - "serde_json", - "settings", - "smol", - "thiserror 2.0.17", - "util", - "uuid", - "workspace", - "worktree", - "zlog", -] - -[[package]] -name = "zeta2_tools" -version = "0.1.0" -dependencies = [ - "anyhow", - "chrono", - "clap", - "client", - "cloud_llm_client", - "cloud_zeta2_prompt", - "collections", - "edit_prediction_context", - "editor", - "feature_flags", - "futures 0.3.31", - "gpui", - "indoc", - "language", - "log", - "multi_buffer", - "ordered-float 2.10.1", - "pretty_assertions", - "project", - "serde", - "serde_json", - "settings", - "telemetry", - "text", - "ui", - "ui_input", - "util", - "workspace", - "zeta2", - "zlog", -] - -[[package]] -name = "zeta_cli" +name = "zeta_prompt" version = "0.1.0" dependencies = [ - "anyhow", - "chrono", - "clap", - "client", - "cloud_llm_client", - "cloud_zeta2_prompt", - "collections", - "debug_adapter_extension", - "edit_prediction_context", - "extension", - "fs", - "futures 0.3.31", - "gpui", - "gpui_tokio", - "indoc", - "language", - "language_extension", - "language_model", - "language_models", - "languages", - "log", - "node_runtime", - "ordered-float 2.10.1", - "paths", - "polars", - "pretty_assertions", - "project", - "prompt_store", - "pulldown-cmark 0.12.2", - "release_channel", - "reqwest_client", "serde", - "serde_json", - "settings", - "shellexpand 2.1.2", - "smol", - "soa-rs", - "terminal_view", - "toml 0.8.23", - "util", - "watch", - "zeta", - "zeta2", - "zlog", ] [[package]] @@ -21796,7 +21094,7 @@ dependencies = [ "aes", "byteorder", "bzip2", - "constant_time_eq 0.1.5", + "constant_time_eq", "crc32fast", "crossbeam-utils", "flate2", @@ -21804,7 +21102,7 @@ dependencies = [ "pbkdf2 0.11.0", "sha1", "time", - "zstd 0.11.2+zstd.1.5.2", + "zstd", ] [[package]] @@ -21817,17 +21115,11 @@ dependencies = [ "crc32fast", "crossbeam-utils", "displaydoc", - "indexmap 2.11.4", + "indexmap", "num_enum", "thiserror 1.0.69", ] -[[package]] -name = "zlib-rs" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2" - [[package]] name = "zlog" version = "0.1.0" @@ -21855,16 +21147,7 @@ version = "0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ - "zstd-safe 
5.0.2+zstd.1.5.2", -] - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe 7.2.4", + "zstd-safe", ] [[package]] @@ -21877,15 +21160,6 @@ dependencies = [ "zstd-sys", ] -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - [[package]] name = "zstd-sys" version = "2.0.16+zstd.1.5.7" @@ -21896,6 +21170,21 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "ztracing" +version = "0.1.0" +dependencies = [ + "tracing", + "tracing-subscriber", + "tracing-tracy", + "zlog", + "ztracing_macro", +] + +[[package]] +name = "ztracing_macro" +version = "0.1.0" + [[package]] name = "zune-core" version = "0.4.12" diff --git a/Cargo.toml b/Cargo.toml index 579bc394e6f13963188f03b1b812a05eec43a7b4..703a34b63af901886e861dba3177e58b19c223f0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ members = [ "crates/agent_servers", "crates/agent_settings", "crates/agent_ui", + "crates/agent_ui_v2", "crates/ai_onboarding", "crates/anthropic", "crates/askpass", @@ -32,7 +33,6 @@ members = [ "crates/cloud_api_client", "crates/cloud_api_types", "crates/cloud_llm_client", - "crates/cloud_zeta2_prompt", "crates/collab", "crates/collab_ui", "crates/collections", @@ -54,11 +54,12 @@ members = [ "crates/diagnostics", "crates/docs_preprocessor", "crates/edit_prediction", - "crates/edit_prediction_button", + "crates/edit_prediction_types", + "crates/edit_prediction_ui", "crates/edit_prediction_context", - "crates/zeta2_tools", "crates/editor", "crates/eval", + "crates/eval_utils", "crates/explorer_command_injector", "crates/extension", "crates/extension_api", @@ -110,6 +111,7 @@ members = [ "crates/menu", "crates/migrator", "crates/mistral", + "crates/miniprofiler_ui", "crates/multi_buffer", "crates/nc", "crates/net", @@ -126,6 +128,7 @@ members = [ "crates/picker", "crates/prettier", "crates/project", + "crates/project_benchmarks", "crates/project_panel", "crates/project_symbols", "crates/prompt_store", @@ -145,7 +148,6 @@ members = [ "crates/rules_library", "crates/schema_generator", "crates/search", - "crates/semantic_version", "crates/session", "crates/settings", "crates/settings_json", @@ -190,6 +192,7 @@ members = [ "crates/vercel", "crates/vim", "crates/vim_mode_setting", + "crates/which_key", "crates/watch", "crates/web_search", "crates/web_search_providers", @@ -199,11 +202,12 @@ members = [ "crates/zed", "crates/zed_actions", "crates/zed_env_vars", - "crates/zeta", - "crates/zeta2", - "crates/zeta_cli", + "crates/edit_prediction_cli", + "crates/zeta_prompt", "crates/zlog", "crates/zlog_settings", + "crates/ztracing", + "crates/ztracing_macro", # # Extensions @@ -240,9 +244,9 @@ action_log = { path = "crates/action_log" } agent = { path = "crates/agent" } activity_indicator = { path = "crates/activity_indicator" } agent_ui = { path = "crates/agent_ui" } +agent_ui_v2 = { path = "crates/agent_ui_v2" } agent_settings = { path = "crates/agent_settings" } agent_servers = { path = "crates/agent_servers" } -ai = { path = "crates/ai" } ai_onboarding = { path = "crates/ai_onboarding" } anthropic = { path = "crates/anthropic" } askpass = { path = "crates/askpass" } @@ -252,7 +256,6 @@ assistant_slash_command = { path = "crates/assistant_slash_command" } 
assistant_slash_commands = { path = "crates/assistant_slash_commands" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } -auto_update_helper = { path = "crates/auto_update_helper" } auto_update_ui = { path = "crates/auto_update_ui" } aws_http_client = { path = "crates/aws_http_client" } bedrock = { path = "crates/bedrock" } @@ -266,8 +269,6 @@ clock = { path = "crates/clock" } cloud_api_client = { path = "crates/cloud_api_client" } cloud_api_types = { path = "crates/cloud_api_types" } cloud_llm_client = { path = "crates/cloud_llm_client" } -cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" } -collab = { path = "crates/collab" } collab_ui = { path = "crates/collab_ui" } collections = { path = "crates/collections", version = "0.1.0" } command_palette = { path = "crates/command_palette" } @@ -288,6 +289,7 @@ deepseek = { path = "crates/deepseek" } derive_refineable = { path = "crates/refineable/derive_refineable" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } +eval_utils = { path = "crates/eval_utils" } extension = { path = "crates/extension" } extension_host = { path = "crates/extension_host" } extensions_ui = { path = "crates/extensions_ui" } @@ -311,10 +313,9 @@ http_client = { path = "crates/http_client" } http_client_tls = { path = "crates/http_client_tls" } icons = { path = "crates/icons" } image_viewer = { path = "crates/image_viewer" } -edit_prediction = { path = "crates/edit_prediction" } -edit_prediction_button = { path = "crates/edit_prediction_button" } +edit_prediction_types = { path = "crates/edit_prediction_types" } +edit_prediction_ui = { path = "crates/edit_prediction_ui" } edit_prediction_context = { path = "crates/edit_prediction_context" } -zeta2_tools = { path = "crates/zeta2_tools" } inspector_ui = { path = "crates/inspector_ui" } install_cli = { path = "crates/install_cli" } journal = { path = "crates/journal" } @@ -341,6 +342,7 @@ menu = { path = "crates/menu" } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } +miniprofiler_ui = { path = "crates/miniprofiler_ui" } nc = { path = "crates/nc" } net = { path = "crates/net" } node_runtime = { path = "crates/node_runtime" } @@ -355,8 +357,6 @@ panel = { path = "crates/panel" } paths = { path = "crates/paths" } perf = { path = "tooling/perf" } picker = { path = "crates/picker" } -plugin = { path = "crates/plugin" } -plugin_macros = { path = "crates/plugin_macros" } prettier = { path = "crates/prettier" } settings_profile_selector = { path = "crates/settings_profile_selector" } project = { path = "crates/project" } @@ -367,18 +367,15 @@ proto = { path = "crates/proto" } recent_projects = { path = "crates/recent_projects" } refineable = { path = "crates/refineable" } release_channel = { path = "crates/release_channel" } -scheduler = { path = "crates/scheduler" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } reqwest_client = { path = "crates/reqwest_client" } -rich_text = { path = "crates/rich_text" } rodio = { git = "https://github.com/RustAudio/rodio", rev ="e2074c6c2acf07b57cf717e076bdda7a9ac6e70b", features = ["wav", "playback", "wav_output", "recording"] } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } rules_library = { path = "crates/rules_library" } search = { path = "crates/search" } -semantic_version = { path = "crates/semantic_version" } session = { path = "crates/session" } settings 
= { path = "crates/settings" } settings_json = { path = "crates/settings_json" } @@ -390,7 +387,6 @@ snippets_ui = { path = "crates/snippets_ui" } sqlez = { path = "crates/sqlez" } sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } -storybook = { path = "crates/storybook" } streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } supermaven = { path = "crates/supermaven" } @@ -407,7 +403,6 @@ terminal_view = { path = "crates/terminal_view" } text = { path = "crates/text" } theme = { path = "crates/theme" } theme_extension = { path = "crates/theme_extension" } -theme_importer = { path = "crates/theme_importer" } theme_selector = { path = "crates/theme_selector" } time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } @@ -421,6 +416,7 @@ util_macros = { path = "crates/util_macros" } vercel = { path = "crates/vercel" } vim = { path = "crates/vim" } vim_mode_setting = { path = "crates/vim_mode_setting" } +which_key = { path = "crates/which_key" } watch = { path = "crates/watch" } web_search = { path = "crates/web_search" } @@ -431,16 +427,18 @@ x_ai = { path = "crates/x_ai" } zed = { path = "crates/zed" } zed_actions = { path = "crates/zed_actions" } zed_env_vars = { path = "crates/zed_env_vars" } -zeta = { path = "crates/zeta" } -zeta2 = { path = "crates/zeta2" } +edit_prediction = { path = "crates/edit_prediction" } +zeta_prompt = { path = "crates/zeta_prompt" } zlog = { path = "crates/zlog" } zlog_settings = { path = "crates/zlog_settings" } +ztracing = { path = "crates/ztracing" } +ztracing_macro = { path = "crates/ztracing_macro" } # # External crates # -agent-client-protocol = { version = "0.7.0", features = ["unstable"] } +agent-client-protocol = { version = "=0.9.0", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = "0.25.1-rc1" any_vec = "0.14" @@ -458,16 +456,16 @@ async-tar = "0.5.1" async-task = "4.7" async-trait = "0.1" async-tungstenite = "0.31.0" -async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } -aws-config = { version = "1.6.1", features = ["behavior-version-latest"] } -aws-credential-types = { version = "1.2.2", features = [ +async_zip = { version = "0.0.18", features = ["deflate", "deflate64"] } +aws-config = { version = "1.8.10", features = ["behavior-version-latest"] } +aws-credential-types = { version = "1.2.8", features = [ "hardcoded-credentials", ] } -aws-sdk-bedrockruntime = { version = "1.80.0", features = [ +aws-sdk-bedrockruntime = { version = "1.112.0", features = [ "behavior-version-latest", ] } -aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] } -aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] } +aws-smithy-runtime-api = { version = "1.9.2", features = ["http-1x", "client"] } +aws-smithy-types = { version = "1.3.4", features = ["http-body-1-x"] } backtrace = "0.3" base64 = "0.22" bincode = "1.2.1" @@ -475,14 +473,16 @@ bitflags = "2.6.0" blade-graphics = { version = "0.7.0" } blade-macros = { version = "0.3.0" } blade-util = { version = "0.3.0" } +brotli = "8.0.2" bytes = "1.0" cargo_metadata = "0.19" cargo_toml = "0.21" cfg-if = "1.0.3" +chardetng = "0.1" chrono = { version = "0.4", features = ["serde"] } ciborium = "0.2" circular-buffer = "1.0" -clap = { version = "4.4", features = ["derive"] } +clap = { version = "4.4", features = ["derive", "wrap_help"] } cocoa = "=0.26.0" cocoa-foundation = "=0.2.0" convert_case = "0.8.0" @@ -502,17 +502,16 @@ dotenvy = "0.15.0" ec4rs = 
"1.1" emojis = "0.6.1" env_logger = "0.11" +encoding_rs = "0.8" exec = "0.3.1" -fancy-regex = "0.14.0" -fork = "0.2.0" +fancy-regex = "0.16.0" +fork = "0.4.0" futures = "0.3" -futures-batch = "0.6.1" futures-lite = "1.13" -gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "3eaa84abca0778eb54272f45a312cb24f9a0b435" } +gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "09acfdf2bd5c1d6254abefd609c808ff73547b2c" } git2 = { version = "0.20.1", default-features = false } globset = "0.4" handlebars = "4.3" -hashbrown = "0.15.3" heck = "0.5" heed = { version = "0.21.0", features = ["read-txn-no-tls"] } hex = "0.4.3" @@ -529,10 +528,10 @@ indoc = "2" inventory = "0.3.19" itertools = "0.14.0" json_dotpath = "1.1" -jsonschema = "0.30.0" +jsonschema = "0.37.0" jsonwebtoken = "9.3" -jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" } -jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" } +jupyter-protocol = "0.10.0" +jupyter-websocket-client = "0.15.0" libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" @@ -545,10 +544,9 @@ minidumper = "0.8" moka = { version = "0.12.10", features = ["sync"] } naga = { version = "25.0", features = ["wgsl-in"] } nanoid = "0.4" -nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" } +nbformat = "0.15.0" nix = "0.29" num-format = "0.4.4" -num-traits = "0.2" objc = "0.2" objc2-foundation = { version = "=0.3.1", default-features = false, features = [ "NSArray", @@ -583,14 +581,13 @@ partial-json-fixer = "0.5.3" parse_int = "0.9" pciid-parser = "0.8.0" pathdiff = "0.2" -pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } -pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-reporter = { git = 
"https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } +pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" } portable-pty = "0.9.0" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } @@ -603,7 +600,6 @@ pulldown-cmark = { version = "0.12.0", default-features = false } quote = "1.0.9" rand = "0.9" rayon = "1.8" -ref-cast = "1.0.24" regex = "1.5" # WARNING: If you change this, you must also publish a new version of zed-reqwest to crates.io reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662463bda39148ba154100dd44d3fba5873a4", default-features = false, features = [ @@ -616,8 +612,8 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662 "stream", ], package = "zed-reqwest", version = "0.12.15-zed" } rsa = "0.9.6" -runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [ - "async-dispatcher-runtime", +runtimelib = { version = "0.30.0", default-features = false, features = [ + "async-dispatcher-runtime", "aws-lc-rs" ] } rust-embed = { version = "8.4", features = ["include-exclude"] } rustc-hash = "2.1.0" @@ -626,7 +622,7 @@ rustls-platform-verifier = "0.5.0" # WARNING: If you change this, you must also publish a new version of zed-scap to crates.io scap = { git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", default-features = false, package = "zed-scap", version = "0.0.8-zed" } schemars = { version = "1.0", features = ["indexmap2"] } -semver = "1.0" +semver = { version = "1.0", features = ["serde"] } serde = { version = "1.0.221", features = ["derive", "rc"] } serde_json = { version = "1.0.144", features = ["preserve_order", "raw_value"] } serde_json_lenient = { version = "0.2", features = [ @@ -636,13 +632,12 @@ serde_json_lenient = { version = "0.2", features = [ serde_path_to_error = "0.1.17" serde_repr = "0.1" serde_urlencoded = "0.7" -serde_with = "3.4.0" sha2 = "0.10" shellexpand = "2.1.0" shlex = "1.3.0" simplelog = "0.12.2" slotmap = "1.0.6" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { version = "1.6", features = ["union", "const_new"] } smol = "2.0" sqlformat = "0.2" stacksafe = "0.1" @@ -656,7 +651,7 @@ sysinfo = "0.37.0" take-until = "0.2.0" tempfile = "3.20.0" thiserror = "2.0.12" -tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" } +tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "2570c4387a8505fb8f1d3f3557454b474f1e8271" } time = { version = "0.3", features = [ "macros", "parsing", @@ -668,11 +663,12 @@ time = { version = "0.3", features = [ tiny_http = "0.8" tokio = { version = "1" } tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] } +tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io", "tokio"] } toml = "0.8" toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] } tower-http = "0.4.4" -tree-sitter = { version = "0.25.10", features = ["wasm"] } -tree-sitter-bash = "0.25.0" +tree-sitter = { version = "0.26", features = ["wasm"] } +tree-sitter-bash = "0.25.1" tree-sitter-c = "0.23" tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", 
rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" } tree-sitter-css = "0.23" @@ -694,6 +690,7 @@ tree-sitter-ruby = "0.23" tree-sitter-rust = "0.24" tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347 tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } +tracing = "0.1.40" unicase = "2.6" unicode-script = "0.5.7" unicode-segmentation = "1.10" @@ -704,7 +701,7 @@ uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] } walkdir = "2.5" wasm-encoder = "0.221" wasmparser = "0.221" -wasmtime = { version = "29", default-features = false, features = [ +wasmtime = { version = "33", default-features = false, features = [ "async", "demangle", "runtime", @@ -713,14 +710,15 @@ wasmtime = { version = "29", default-features = false, features = [ "incremental-cache", "parallel-compilation", ] } -wasmtime-wasi = "29" +wasmtime-wasi = "33" +wax = "0.6" which = "6.0.0" windows-core = "0.61" -wit-component = "0.221" yawc = "0.2.5" zeroize = "1.8" zstd = "0.11" + [workspace.dependencies.windows] version = "0.61" features = [ @@ -776,6 +774,7 @@ features = [ notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" } notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } +calloop = { git = "https://github.com/zed-industries/calloop" } [profile.dev] split-debuginfo = "unpacked" @@ -789,14 +788,20 @@ codegen-units = 16 codegen-units = 16 [profile.dev.package] +# proc-macros start +gpui_macros = { opt-level = 3 } +derive_refineable = { opt-level = 3 } +settings_macros = { opt-level = 3 } +sqlez_macros = { opt-level = 3, codegen-units = 1 } +ui_macros = { opt-level = 3 } +util_macros = { opt-level = 3 } +quote = { opt-level = 3 } +syn = { opt-level = 3 } +proc-macro2 = { opt-level = 3 } +# proc-macros end + taffy = { opt-level = 3 } -cranelift-codegen = { opt-level = 3 } -cranelift-codegen-meta = { opt-level = 3 } -cranelift-codegen-shared = { opt-level = 3 } resvg = { opt-level = 3 } -rustybuzz = { opt-level = 3 } -ttf-parser = { opt-level = 3 } -wasmtime-cranelift = { opt-level = 3 } wasmtime = { opt-level = 3 } # Build single-source-file crates with cg=1 as it helps make `cargo build` of a whole workspace a bit faster activity_indicator = { codegen-units = 1 } @@ -805,12 +810,11 @@ breadcrumbs = { codegen-units = 1 } collections = { codegen-units = 1 } command_palette = { codegen-units = 1 } command_palette_hooks = { codegen-units = 1 } -extension_cli = { codegen-units = 1 } feature_flags = { codegen-units = 1 } file_icons = { codegen-units = 1 } fsevent = { codegen-units = 1 } image_viewer = { codegen-units = 1 } -edit_prediction_button = { codegen-units = 1 } +edit_prediction_ui = { codegen-units = 1 } install_cli = { codegen-units = 1 } journal = { codegen-units = 1 } json_schema_store = { codegen-units = 1 } @@ -825,12 +829,9 @@ project_symbols = { codegen-units = 1 } refineable = { codegen-units = 1 } release_channel = { codegen-units = 1 } reqwest_client = { codegen-units = 1 } -rich_text = { codegen-units = 1 } -semantic_version = { codegen-units = 1 } session = { codegen-units = 1 } snippet = { codegen-units 
= 1 } snippets_ui = { codegen-units = 1 } -sqlez_macros = { codegen-units = 1 } story = { codegen-units = 1 } supermaven_api = { codegen-units = 1 } telemetry_events = { codegen-units = 1 } @@ -860,8 +861,6 @@ unexpected_cfgs = { level = "allow" } dbg_macro = "deny" todo = "deny" -# This is not a style lint, see https://github.com/rust-lang/rust-clippy/pull/15454 -# Remove when the lint gets promoted to `suspicious`. declare_interior_mutable_const = "deny" redundant_clone = "deny" diff --git a/Dockerfile-collab b/Dockerfile-collab index a85fe93f198475534cb7396abe594f9d02eeb57b..188e7daddfb471c41b237ca75469355cfc866ae3 100644 --- a/Dockerfile-collab +++ b/Dockerfile-collab @@ -1,6 +1,6 @@ # syntax = docker/dockerfile:1.2 -FROM rust:1.90-bookworm as builder +FROM rust:1.92-bookworm as builder WORKDIR app COPY . . @@ -34,8 +34,4 @@ RUN apt-get update; \ linux-perf binutils WORKDIR app COPY --from=builder /app/collab /app/collab -COPY --from=builder /app/crates/collab/migrations /app/migrations -COPY --from=builder /app/crates/collab/migrations_llm /app/migrations_llm -ENV MIGRATIONS_PATH=/app/migrations -ENV LLM_DATABASE_MIGRATIONS_PATH=/app/migrations_llm ENTRYPOINT ["/app/collab"] diff --git a/README.md b/README.md index d1e2a75beccc9b115bd3b2e09bcc812aebc98329..d3a5fd20526e5eae6826241dce2bb94e8533ecb3 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of ### Installation -On macOS, Linux, and Windows you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager). +On macOS, Linux, and Windows you can [download Zed directly](https://zed.dev/download) or install Zed via your local package manager ([macOS](https://zed.dev/docs/installation#macos)/[Linux](https://zed.dev/docs/linux#installing-via-a-package-manager)/[Windows](https://zed.dev/docs/windows#package-managers)). 
Other platforms are not yet available: diff --git a/REVIEWERS.conl b/REVIEWERS.conl index 230e6e4d3cc6e7cf51339ea84bfd1dac5e065c12..bca694d7a06fe1112f7f8bab158dad63a365ea74 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -28,7 +28,7 @@ ai = @rtfeldman audio - = @dvdsk + = @yara-blue crashes = @p1n3appl3 @@ -43,7 +43,9 @@ design = @danilo-leal docs + = @miguelraz = @probably-neb + = @yeskunall extension = @kubkon @@ -51,6 +53,10 @@ extension git = @cole-miller = @danilo-leal + = @yara-blue + = @kubkon + = @Anthony-Eid + = @cameron1024 gpui = @Anthony-Eid @@ -70,7 +76,7 @@ languages linux = @cole-miller - = @dvdsk + = @yara-blue = @p1n3appl3 = @probably-neb = @smitbarmase @@ -86,7 +92,7 @@ multi_buffer = @SomeoneToIgnore pickers - = @dvdsk + = @yara-blue = @p1n3appl3 = @SomeoneToIgnore @@ -98,6 +104,12 @@ settings_ui = @danilo-leal = @probably-neb +sum_tree + = @Veykril + +support + = @miguelraz + tasks = @SomeoneToIgnore = @Veykril @@ -106,6 +118,9 @@ terminal = @kubkon = @Veykril +text + = @Veykril + vim = @ConradIrwin = @dinocosta @@ -115,3 +130,4 @@ vim windows = @localcc = @reflectronic + = @Veykril diff --git a/assets/icons/at_sign.svg b/assets/icons/at_sign.svg new file mode 100644 index 0000000000000000000000000000000000000000..531c10c8dc151fb27f2a53d424ab57acecd7d03c --- /dev/null +++ b/assets/icons/at_sign.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/box.svg b/assets/icons/box.svg new file mode 100644 index 0000000000000000000000000000000000000000..7e1276c629fb8bdc5a7ed48d9e2de6369d4c2bb0 --- /dev/null +++ b/assets/icons/box.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/debug_step_back.svg b/assets/icons/debug_step_back.svg deleted file mode 100644 index 61d45866f61cbabbd9a7ae9975809d342cb76ed5..0000000000000000000000000000000000000000 --- a/assets/icons/debug_step_back.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/debug_step_into.svg b/assets/icons/debug_step_into.svg index 9a517fc7ca0762b17446a75cd90f39a91e1b51cf..0a5882354380b659425fecca2b4c6000516e422f 100644 --- a/assets/icons/debug_step_into.svg +++ b/assets/icons/debug_step_into.svg @@ -1 +1,5 @@ - + + + + + diff --git a/assets/icons/debug_step_out.svg b/assets/icons/debug_step_out.svg index 147a44f930f34f6c3ddce94693a178a932129cb5..c128f56111f2b68d7229f9d2f61b6b2496f99bba 100644 --- a/assets/icons/debug_step_out.svg +++ b/assets/icons/debug_step_out.svg @@ -1 +1,5 @@ - + + + + + diff --git a/assets/icons/debug_step_over.svg b/assets/icons/debug_step_over.svg index 336abc11deb866a128e8418dab47af01b6e4d3f6..5d8ccd5b7a20b2f8a108ab4c2e03694db4f6f8a8 100644 --- a/assets/icons/debug_step_over.svg +++ b/assets/icons/debug_step_over.svg @@ -1 +1,5 @@ - + + + + + diff --git a/assets/icons/file_icons/odin.svg b/assets/icons/file_icons/odin.svg new file mode 100644 index 0000000000000000000000000000000000000000..3b4ef8931943eb30279a998010e8df0632b08cf4 --- /dev/null +++ b/assets/icons/file_icons/odin.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/icons/git_branch_plus.svg b/assets/icons/git_branch_plus.svg new file mode 100644 index 0000000000000000000000000000000000000000..cf60ce66b4086ba57ef4c2e56f3554d548e863fc --- /dev/null +++ b/assets/icons/git_branch_plus.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/assets/icons/inception.svg b/assets/icons/inception.svg new file mode 100644 index 0000000000000000000000000000000000000000..77a96c0b390ab9f2fe89143c2a89ba916000fabc --- /dev/null +++ b/assets/icons/inception.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git 
a/assets/icons/sweep_ai.svg b/assets/icons/sweep_ai.svg new file mode 100644 index 0000000000000000000000000000000000000000..bf3459c7ea9896bc6c1d2297d1f7671cfc8a4d46 --- /dev/null +++ b/assets/icons/sweep_ai.svg @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/assets/icons/zed_agent_two.svg b/assets/icons/zed_agent_two.svg new file mode 100644 index 0000000000000000000000000000000000000000..c352be84d2f1bea6da1f6a5be70b9420f019b6d6 --- /dev/null +++ b/assets/icons/zed_agent_two.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 57c799d8fd4f74478f5fdf469ff142e0c26e4503..ec21bc152edf969f57ac341e4b92f78c9e5da11a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -25,7 +25,8 @@ "ctrl-shift-w": "workspace::CloseWindow", "shift-escape": "workspace::ToggleZoom", "open": "workspace::Open", - "ctrl-o": "workspace::Open", + "ctrl-o": "workspace::OpenFiles", + "ctrl-k ctrl-o": "workspace::Open", "ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], @@ -41,18 +42,18 @@ "ctrl-f11": "debugger::StepInto", "shift-f11": "debugger::StepOut", "f11": "zed::ToggleFullScreen", - "ctrl-alt-z": "edit_prediction::RateCompletions", + "ctrl-alt-z": "edit_prediction::RatePredictions", "ctrl-alt-shift-i": "edit_prediction::ToggleMenu", "ctrl-alt-l": "lsp_tool::ToggleMenu", - "ctrl-alt-.": "project_panel::ToggleHideHidden" - } + "ctrl-alt-shift-s": "workspace::ToggleWorktreeSecurity", + }, }, { "context": "Picker || menu", "bindings": { "up": "menu::SelectPrevious", - "down": "menu::SelectNext" - } + "down": "menu::SelectNext", + }, }, { "context": "Editor", @@ -63,7 +64,6 @@ "delete": "editor::Delete", "tab": "editor::Tab", "shift-tab": "editor::Backtab", - "ctrl-k": "editor::CutToEndOfLine", "ctrl-k ctrl-q": "editor::Rewrap", "ctrl-k q": "editor::Rewrap", "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }], @@ -125,8 +125,8 @@ "shift-f10": "editor::OpenContextMenu", "ctrl-alt-shift-e": "editor::ToggleEditPrediction", "f9": "editor::ToggleBreakpoint", - "shift-f9": "editor::EditLogBreakpoint" - } + "shift-f9": "editor::EditLogBreakpoint", + }, }, { "context": "Editor && mode == full", @@ -145,44 +145,44 @@ "ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-alt-backspace": "editor::GoToNextChange", - "alt-enter": "editor::OpenSelectionsInMultibuffer" - } + "alt-enter": "editor::OpenSelectionsInMultibuffer", + }, }, { "context": "Editor && mode == full && edit_prediction", "bindings": { "alt-]": "editor::NextEditPrediction", - "alt-[": "editor::PreviousEditPrediction" - } + "alt-[": "editor::PreviousEditPrediction", + }, }, { "context": "Editor && !edit_prediction", "bindings": { - "alt-\\": "editor::ShowEditPrediction" - } + "alt-\\": "editor::ShowEditPrediction", + }, }, { "context": "Editor && mode == auto_height", "bindings": { "ctrl-enter": "editor::Newline", "shift-enter": "editor::Newline", - "ctrl-shift-enter": "editor::NewlineBelow" - } + "ctrl-shift-enter": "editor::NewlineBelow", + }, }, { "context": "Markdown", "bindings": { "copy": "markdown::Copy", "ctrl-insert": "markdown::Copy", - "ctrl-c": "markdown::Copy" - } + "ctrl-c": "markdown::Copy", + }, }, { "context": "Editor && jupyter && 
!ContextEditor", "bindings": { "ctrl-shift-enter": "repl::Run", - "ctrl-alt-enter": "repl::RunInPlace" - } + "ctrl-alt-enter": "repl::RunInPlace", + }, }, { "context": "Editor && !agent_diff", @@ -190,8 +190,8 @@ "ctrl-k ctrl-r": "git::Restore", "ctrl-alt-y": "git::ToggleStaged", "alt-y": "git::StageAndNext", - "alt-shift-y": "git::UnstageAndNext" - } + "alt-shift-y": "git::UnstageAndNext", + }, }, { "context": "Editor && editor_agent_diff", @@ -200,8 +200,8 @@ "ctrl-n": "agent::Reject", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll", - "shift-ctrl-r": "agent::OpenAgentDiff" - } + "shift-ctrl-r": "agent::OpenAgentDiff", + }, }, { "context": "AgentDiff", @@ -209,8 +209,8 @@ "ctrl-y": "agent::Keep", "ctrl-n": "agent::Reject", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "ContextEditor > Editor", @@ -226,8 +226,8 @@ "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPreviousMatch", - "ctrl-k l": "agent::OpenRulesLibrary" - } + "ctrl-k l": "agent::OpenRulesLibrary", + }, }, { "context": "AgentPanel", @@ -240,50 +240,49 @@ "ctrl-alt-l": "agent::OpenRulesLibrary", "ctrl-i": "agent::ToggleProfileSelector", "ctrl-alt-/": "agent::ToggleModelSelector", - "ctrl-shift-a": "agent::ToggleContextPicker", "ctrl-shift-j": "agent::ToggleNavigationMenu", "ctrl-alt-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl->": "agent::AddSelectionToThread", - "ctrl-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", "super-ctrl-b": "agent::ToggleBurnMode", "alt-enter": "agent::ContinueWithBurnMode", "ctrl-y": "agent::AllowOnce", "ctrl-alt-y": "agent::AllowAlways", - "ctrl-alt-z": "agent::RejectOnce" - } + "ctrl-alt-z": "agent::RejectOnce", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AgentPanel > NavigationMenu", "bindings": { - "shift-backspace": "agent::DeleteRecentlyOpenThread" - } + "shift-backspace": "agent::DeleteRecentlyOpenThread", + }, }, { "context": "AgentPanel > Markdown", "bindings": { "copy": "markdown::CopyAsMarkdown", "ctrl-insert": "markdown::CopyAsMarkdown", - "ctrl-c": "markdown::CopyAsMarkdown" - } + "ctrl-c": "markdown::CopyAsMarkdown", + }, }, { "context": "AgentPanel && text_thread", "bindings": { "ctrl-n": "agent::NewTextThread", - "ctrl-alt-t": "agent::NewThread" - } + "ctrl-alt-t": "agent::NewThread", + }, }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewExternalAgentThread", - "ctrl-alt-t": "agent::NewThread" - } + "ctrl-alt-t": "agent::NewThread", + }, }, { "context": "MessageEditor && !Picker > Editor && !use_modifier_to_send", @@ -293,8 +292,8 @@ "ctrl-i": "agent::ToggleProfileSelector", "shift-ctrl-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "MessageEditor && !Picker > Editor && use_modifier_to_send", @@ -304,41 +303,30 @@ "ctrl-i": "agent::ToggleProfileSelector", "shift-ctrl-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "EditMessageEditor > Editor", "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": "editor::Newline" - } + 
"alt-enter": "editor::Newline", + }, }, { "context": "AgentFeedbackMessageEditor > Editor", "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": "editor::Newline" - } - }, - { - "context": "ContextStrip", - "bindings": { - "up": "agent::FocusUp", - "right": "agent::FocusRight", - "left": "agent::FocusLeft", - "down": "agent::FocusDown", - "backspace": "agent::RemoveFocusedContext", - "enter": "agent::AcceptSuggestedContext" - } + "alt-enter": "editor::Newline", + }, }, { "context": "AcpThread > ModeSelector", "bindings": { - "ctrl-enter": "menu::Confirm" - } + "ctrl-enter": "menu::Confirm", + }, }, { "context": "AcpThread > Editor && !use_modifier_to_send", @@ -347,8 +335,8 @@ "enter": "agent::Chat", "shift-ctrl-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "AcpThread > Editor && use_modifier_to_send", @@ -358,14 +346,15 @@ "shift-ctrl-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll", - "shift-tab": "agent::CycleModeSelector" - } + "shift-tab": "agent::CycleModeSelector", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "ThreadHistory", "bindings": { - "backspace": "agent::RemoveSelectedThread" - } + "backspace": "agent::RemoveSelectedThread", + }, }, { "context": "RulesLibrary", @@ -373,8 +362,8 @@ "new": "rules_library::NewRule", "ctrl-n": "rules_library::NewRule", "ctrl-shift-s": "rules_library::ToggleDefaultRule", - "ctrl-w": "workspace::CloseWindow" - } + "ctrl-w": "workspace::CloseWindow", + }, }, { "context": "BufferSearchBar", @@ -387,22 +376,22 @@ "find": "search::FocusSearch", "ctrl-f": "search::FocusSearch", "ctrl-h": "search::ToggleReplace", - "ctrl-l": "search::ToggleSelection" - } + "ctrl-l": "search::ToggleSelection", + }, }, { "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", - "ctrl-enter": "search::ReplaceAll" - } + "ctrl-enter": "search::ReplaceAll", + }, }, { "context": "BufferSearchBar && !in_replace > Editor", "bindings": { "up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar", @@ -413,22 +402,22 @@ "ctrl-shift-f": "search::FocusSearch", "ctrl-shift-h": "search::ToggleReplace", "alt-ctrl-g": "search::ToggleRegex", - "alt-ctrl-x": "search::ToggleRegex" - } + "alt-ctrl-x": "search::ToggleRegex", + }, }, { "context": "ProjectSearchBar > Editor", "bindings": { "up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", - "ctrl-alt-enter": "search::ReplaceAll" - } + "ctrl-alt-enter": "search::ReplaceAll", + }, }, { "context": "ProjectSearchView", @@ -436,8 +425,8 @@ "escape": "project_search::ToggleFocus", "ctrl-shift-h": "search::ToggleReplace", "alt-ctrl-g": "search::ToggleRegex", - "alt-ctrl-x": "search::ToggleRegex" - } + "alt-ctrl-x": "search::ToggleRegex", + }, }, { "context": "Pane", @@ -486,8 +475,8 @@ "ctrl-alt-shift-r": "search::ToggleRegex", "ctrl-alt-shift-x": "search::ToggleRegex", "alt-r": "search::ToggleRegex", - "ctrl-k shift-enter": "pane::TogglePinTab" - } + "ctrl-k shift-enter": "pane::TogglePinTab", + }, }, // Bindings from VS Code { @@ -514,6 +503,7 @@ "ctrl-k ctrl-i": "editor::Hover", "ctrl-k ctrl-b": "editor::BlameHover", "ctrl-/": 
["editor::ToggleComments", { "advance_downwards": false }], + "ctrl-k ctrl-c": ["editor::ToggleComments", { "advance_downwards": false }], "f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }], "shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }], "f2": "editor::Rename", @@ -550,31 +540,31 @@ "ctrl-\\": "pane::SplitRight", "ctrl-alt-shift-c": "editor::DisplayCursorNames", "alt-.": "editor::GoToHunk", - "alt-,": "editor::GoToPreviousHunk" - } + "alt-,": "editor::GoToPreviousHunk", + }, }, { "context": "Editor && extension == md", "use_key_equivalents": true, "bindings": { "ctrl-k v": "markdown::OpenPreviewToTheSide", - "ctrl-shift-v": "markdown::OpenPreview" - } + "ctrl-shift-v": "markdown::OpenPreview", + }, }, { "context": "Editor && extension == svg", "use_key_equivalents": true, "bindings": { "ctrl-k v": "svg::OpenPreviewToTheSide", - "ctrl-shift-v": "svg::OpenPreview" - } + "ctrl-shift-v": "svg::OpenPreview", + }, }, { "context": "Editor && mode == full", "bindings": { "ctrl-shift-o": "outline::Toggle", - "ctrl-g": "go_to_line::Toggle" - } + "ctrl-g": "go_to_line::Toggle", + }, }, { "context": "Workspace", @@ -630,8 +620,8 @@ "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", - "ctrl-tab": "tab_switcher::Toggle", "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }], + "ctrl-tab": "tab_switcher::Toggle", "ctrl-e": "file_finder::Toggle", "f1": "command_palette::Toggle", "ctrl-shift-p": "command_palette::Toggle", @@ -668,28 +658,28 @@ // "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }], "f5": "debugger::Rerun", "ctrl-f4": "workspace::CloseActiveDock", - "ctrl-w": "workspace::CloseActiveDock" - } + "ctrl-w": "workspace::CloseActiveDock", + }, }, { "context": "Workspace && debugger_running", "bindings": { - "f5": "zed::NoAction" - } + "f5": "zed::NoAction", + }, }, { "context": "Workspace && debugger_stopped", "bindings": { - "f5": "debugger::Continue" - } + "f5": "debugger::Continue", + }, }, { "context": "ApplicationMenu", "bindings": { "f10": "menu::Cancel", "left": "app_menu::ActivateMenuLeft", - "right": "app_menu::ActivateMenuRight" - } + "right": "app_menu::ActivateMenuRight", + }, }, // Bindings from Sublime Text { @@ -707,8 +697,8 @@ "ctrl-alt-shift-left": "editor::SelectToPreviousSubwordStart", "ctrl-alt-shift-b": "editor::SelectToPreviousSubwordStart", "ctrl-alt-shift-right": "editor::SelectToNextSubwordEnd", - "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd" - } + "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd", + }, }, // Bindings from Atom { @@ -717,37 +707,37 @@ "ctrl-k up": "pane::SplitUp", "ctrl-k down": "pane::SplitDown", "ctrl-k left": "pane::SplitLeft", - "ctrl-k right": "pane::SplitRight" - } + "ctrl-k right": "pane::SplitRight", + }, }, // Bindings that should be unified with bindings for more general actions { "context": "Editor && renaming", "bindings": { - "enter": "editor::ConfirmRename" - } + "enter": "editor::ConfirmRename", + }, }, { "context": "Editor && showing_completions", "bindings": { "enter": "editor::ConfirmCompletion", "shift-enter": "editor::ConfirmCompletionReplace", - "tab": "editor::ComposeCompletion" - } + "tab": "editor::ComposeCompletion", + }, }, { "context": "Editor && in_snippet && has_next_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "tab": "editor::NextSnippetTabstop" - } + "tab": "editor::NextSnippetTabstop", + }, }, { "context": 
"Editor && in_snippet && has_previous_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "shift-tab": "editor::PreviousSnippetTabstop" - } + "shift-tab": "editor::PreviousSnippetTabstop", + }, }, // Bindings for accepting edit predictions // @@ -759,22 +749,24 @@ "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", "tab": "editor::AcceptEditPrediction", - "alt-right": "editor::AcceptPartialEditPrediction" - } + "alt-right": "editor::AcceptNextWordEditPrediction", + "alt-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && edit_prediction_conflict", "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "alt-right": "editor::AcceptPartialEditPrediction" - } + "alt-right": "editor::AcceptNextWordEditPrediction", + "alt-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && showing_code_actions", "bindings": { - "enter": "editor::ConfirmCodeAction" - } + "enter": "editor::ConfirmCodeAction", + }, }, { "context": "Editor && (showing_code_actions || showing_completions)", @@ -784,29 +776,29 @@ "ctrl-n": "editor::ContextMenuNext", "down": "editor::ContextMenuNext", "pageup": "editor::ContextMenuFirst", - "pagedown": "editor::ContextMenuLast" - } + "pagedown": "editor::ContextMenuLast", + }, }, { "context": "Editor && showing_signature_help && !showing_completions", "bindings": { "up": "editor::SignatureHelpPrevious", - "down": "editor::SignatureHelpNext" - } + "down": "editor::SignatureHelpNext", + }, }, // Custom bindings { "bindings": { "ctrl-alt-shift-f": "workspace::FollowNextCollaborator", // Only available in debug builds: opens an element inspector for development. - "ctrl-alt-i": "dev::ToggleInspector" - } + "ctrl-alt-i": "dev::ToggleInspector", + }, }, { "context": "!Terminal", "bindings": { - "ctrl-shift-c": "collab_panel::ToggleFocus" - } + "ctrl-shift-c": "collab_panel::ToggleFocus", + }, }, { "context": "!ContextEditor > Editor && mode == full", @@ -818,16 +810,17 @@ "ctrl-f8": "editor::GoToHunk", "ctrl-shift-f8": "editor::GoToPreviousHunk", "ctrl-enter": "assistant::InlineAssist", - "ctrl-:": "editor::ToggleInlayHints" - } + "ctrl-:": "editor::ToggleInlayHints", + }, }, { "context": "PromptEditor", "bindings": { "ctrl-[": "agent::CyclePreviousInlineAssist", "ctrl-]": "agent::CycleNextInlineAssist", - "ctrl-alt-e": "agent::RemoveAllContext" - } + "ctrl-shift-enter": "inline_assistant::ThumbsUpResult", + "ctrl-shift-backspace": "inline_assistant::ThumbsDownResult", + }, }, { "context": "Prompt", @@ -835,14 +828,14 @@ "left": "menu::SelectPrevious", "right": "menu::SelectNext", "h": "menu::SelectPrevious", - "l": "menu::SelectNext" - } + "l": "menu::SelectNext", + }, }, { "context": "ProjectSearchBar && !in_replace", "bindings": { - "ctrl-enter": "project_search::SearchInNew" - } + "ctrl-enter": "project_search::SearchInNew", + }, }, { "context": "OutlinePanel && not_editing", @@ -859,13 +852,14 @@ "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", "alt-enter": "editor::OpenExcerpts", - "ctrl-alt-enter": "editor::OpenExcerptsSplit" - } + "ctrl-alt-enter": "editor::OpenExcerptsSplit", + }, }, { "context": "ProjectPanel", "bindings": { "left": "project_panel::CollapseSelectedEntry", + "ctrl-left": "project_panel::CollapseAllEntries", "right": "project_panel::ExpandSelectedEntry", "new": "project_panel::NewFile", "ctrl-n": "project_panel::NewFile", @@ -897,20 +891,22 @@ "ctrl-alt-shift-f": 
"project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ProjectPanel && not_editing", "bindings": { - "space": "project_panel::Open" - } + "space": "project_panel::Open", + }, }, { "context": "GitPanel && ChangesList", "bindings": { - "up": "menu::SelectPrevious", - "down": "menu::SelectNext", + "left": "git_panel::CollapseSelectedEntry", + "right": "git_panel::ExpandSelectedEntry", + "up": "git_panel::PreviousEntry", + "down": "git_panel::NextEntry", "enter": "menu::Confirm", "alt-y": "git::StageFile", "alt-shift-y": "git::UnstageFile", @@ -925,15 +921,15 @@ "backspace": ["git::RestoreFile", { "skip_prompt": false }], "shift-delete": ["git::RestoreFile", { "skip_prompt": false }], "ctrl-backspace": ["git::RestoreFile", { "skip_prompt": false }], - "ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }] - } + "ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }], + }, }, { "context": "GitPanel && CommitEditor", "use_key_equivalents": true, "bindings": { - "escape": "git::Cancel" - } + "escape": "git::Cancel", + }, }, { "context": "GitCommit > Editor", @@ -942,8 +938,8 @@ "enter": "editor::Newline", "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", - "alt-l": "git::GenerateCommitMessage" - } + "alt-l": "git::GenerateCommitMessage", + }, }, { "context": "GitPanel", @@ -959,8 +955,8 @@ "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", "ctrl-enter": "git::Commit", - "ctrl-shift-enter": "git::Amend" - } + "ctrl-shift-enter": "git::Amend", + }, }, { "context": "GitDiff > Editor", @@ -968,14 +964,14 @@ "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", - "ctrl-shift-space": "git::UnstageAll" - } + "ctrl-shift-space": "git::UnstageAll", + }, }, { "context": "AskPass > Editor", "bindings": { - "enter": "menu::Confirm" - } + "enter": "menu::Confirm", + }, }, { "context": "CommitEditor > Editor", @@ -987,16 +983,16 @@ "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", "alt-up": "git_panel::FocusChanges", - "alt-l": "git::GenerateCommitMessage" - } + "alt-l": "git::GenerateCommitMessage", + }, }, { "context": "DebugPanel", "bindings": { "ctrl-t": "debugger::ToggleThreadPicker", "ctrl-i": "debugger::ToggleSessionPicker", - "shift-alt-escape": "debugger::ToggleExpandItem" - } + "shift-alt-escape": "debugger::ToggleExpandItem", + }, }, { "context": "VariableList", @@ -1008,8 +1004,8 @@ "ctrl-alt-c": "variable_list::CopyVariableName", "delete": "variable_list::RemoveWatch", "backspace": "variable_list::RemoveWatch", - "alt-enter": "variable_list::AddWatch" - } + "alt-enter": "variable_list::AddWatch", + }, }, { "context": "BreakpointList", @@ -1017,35 +1013,35 @@ "space": "debugger::ToggleEnableBreakpoint", "backspace": "debugger::UnsetBreakpoint", "left": "debugger::PreviousBreakpointProperty", - "right": "debugger::NextBreakpointProperty" - } + "right": "debugger::NextBreakpointProperty", + }, }, { "context": "CollabPanel && not_editing", "bindings": { "ctrl-backspace": "collab_panel::Remove", - "space": "menu::Confirm" - } + "space": "menu::Confirm", + }, }, { "context": "CollabPanel", "bindings": { "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", - "alt-enter": "collab_panel::OpenSelectedChannelNotes" - } + "alt-enter": "collab_panel::OpenSelectedChannelNotes", + }, }, { "context": "(CollabPanel && editing) > Editor", "bindings": { - 
"space": "collab_panel::InsertSpace" - } + "space": "collab_panel::InsertSpace", + }, }, { "context": "ChannelModal", "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "Picker > Editor", @@ -1054,29 +1050,29 @@ "up": "menu::SelectPrevious", "down": "menu::SelectNext", "tab": "picker::ConfirmCompletion", - "alt-enter": ["picker::ConfirmInput", { "secondary": false }] - } + "alt-enter": ["picker::ConfirmInput", { "secondary": false }], + }, }, { "context": "ChannelModal > Picker > Editor", "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "ToolchainSelector", "use_key_equivalents": true, "bindings": { - "ctrl-shift-a": "toolchain::AddToolchain" - } + "ctrl-shift-a": "toolchain::AddToolchain", + }, }, { "context": "FileFinder || (FileFinder > Picker > Editor)", "bindings": { "ctrl-p": "file_finder::Toggle", "ctrl-shift-a": "file_finder::ToggleSplitMenu", - "ctrl-shift-i": "file_finder::ToggleFilterMenu" - } + "ctrl-shift-i": "file_finder::ToggleFilterMenu", + }, }, { "context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)", @@ -1085,8 +1081,8 @@ "ctrl-j": "pane::SplitDown", "ctrl-k": "pane::SplitUp", "ctrl-h": "pane::SplitLeft", - "ctrl-l": "pane::SplitRight" - } + "ctrl-l": "pane::SplitRight", + }, }, { "context": "TabSwitcher", @@ -1094,15 +1090,15 @@ "ctrl-shift-tab": "menu::SelectPrevious", "ctrl-up": "menu::SelectPrevious", "ctrl-down": "menu::SelectNext", - "ctrl-backspace": "tab_switcher::CloseSelectedItem" - } + "ctrl-backspace": "tab_switcher::CloseSelectedItem", + }, }, { "context": "StashList || (StashList > Picker > Editor)", "bindings": { "ctrl-shift-backspace": "stash_picker::DropStashItem", - "ctrl-shift-v": "stash_picker::ShowStashItem" - } + "ctrl-shift-v": "stash_picker::ShowStashItem", + }, }, { "context": "Terminal", @@ -1147,65 +1143,69 @@ "ctrl-shift-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask", "alt-t": "terminal::RerunTask", - "ctrl-shift-5": "pane::SplitRight" - } + "ctrl-shift-5": "pane::SplitRight", + }, }, { "context": "ZedPredictModal", "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ConfigureContextServerModal > Editor", "bindings": { "escape": "menu::Cancel", "enter": "editor::Newline", - "ctrl-enter": "menu::Confirm" - } + "ctrl-enter": "menu::Confirm", + }, }, { "context": "ContextServerToolsModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "OnboardingAiConfigurationModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "Diagnostics", "use_key_equivalents": true, "bindings": { - "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh" - } + "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh", + }, }, { "context": "DebugConsole > Editor", "use_key_equivalents": true, "bindings": { "enter": "menu::Confirm", - "alt-enter": "console::WatchExpression" - } + "alt-enter": "console::WatchExpression", + }, }, { "context": "RunModal", "bindings": { "ctrl-tab": "pane::ActivateNextItem", - "ctrl-shift-tab": "pane::ActivatePreviousItem" - } + "ctrl-shift-tab": "pane::ActivatePreviousItem", + }, }, { "context": "MarkdownPreview", "bindings": { - "pageup": "markdown::MovePageUp", - "pagedown": "markdown::MovePageDown" - } + "pageup": "markdown::ScrollPageUp", + "pagedown": "markdown::ScrollPageDown", + "up": 
"markdown::ScrollUp", + "down": "markdown::ScrollDown", + "alt-up": "markdown::ScrollUpByItem", + "alt-down": "markdown::ScrollDownByItem", + }, }, { "context": "KeymapEditor", @@ -1219,8 +1219,8 @@ "alt-enter": "keymap_editor::CreateBinding", "ctrl-c": "keymap_editor::CopyAction", "ctrl-shift-c": "keymap_editor::CopyContext", - "ctrl-t": "keymap_editor::ShowMatchingKeybinds" - } + "ctrl-t": "keymap_editor::ShowMatchingKeybinds", + }, }, { "context": "KeystrokeInput", @@ -1228,48 +1228,67 @@ "bindings": { "enter": "keystroke_input::StartRecording", "escape escape escape": "keystroke_input::StopRecording", - "delete": "keystroke_input::ClearKeystrokes" - } + "delete": "keystroke_input::ClearKeystrokes", + }, }, { "context": "KeybindEditorModal", "use_key_equivalents": true, "bindings": { "ctrl-enter": "menu::Confirm", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "KeybindEditorModal > Editor", "use_key_equivalents": true, "bindings": { "up": "menu::SelectPrevious", - "down": "menu::SelectNext" - } + "down": "menu::SelectNext", + }, }, { "context": "Onboarding", "use_key_equivalents": true, "bindings": { + "ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }], + "ctrl-0": ["zed::ResetUiFontSize", { "persist": false }], "ctrl-enter": "onboarding::Finish", "alt-shift-l": "onboarding::SignIn", - "alt-shift-a": "onboarding::OpenAccount" - } + "alt-shift-a": "onboarding::OpenAccount", + }, + }, + { + "context": "Welcome", + "use_key_equivalents": true, + "bindings": { + "ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }], + "ctrl-0": ["zed::ResetUiFontSize", { "persist": false }], + "ctrl-1": ["welcome::OpenRecentProject", 0], + "ctrl-2": ["welcome::OpenRecentProject", 1], + "ctrl-3": ["welcome::OpenRecentProject", 2], + "ctrl-4": ["welcome::OpenRecentProject", 3], + "ctrl-5": ["welcome::OpenRecentProject", 4], + }, }, { "context": "InvalidBuffer", "use_key_equivalents": true, "bindings": { - "ctrl-shift-enter": "workspace::OpenWithSystem" - } + "ctrl-shift-enter": "workspace::OpenWithSystem", + }, }, { "context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", - "ctrl-space": "git::WorktreeFromDefault" - } + "ctrl-space": "git::WorktreeFromDefault", + }, }, { "context": "SettingsWindow", @@ -1279,6 +1298,7 @@ "escape": "workspace::CloseWindow", "ctrl-m": "settings_editor::Minimize", "ctrl-f": "search::FocusSearch", + "ctrl-,": "settings_editor::OpenCurrentFile", "left": "settings_editor::ToggleFocusNav", "ctrl-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI @@ -1293,16 +1313,16 @@ "ctrl-9": ["settings_editor::FocusFile", 8], "ctrl-0": ["settings_editor::FocusFile", 9], "ctrl-pageup": "settings_editor::FocusPreviousFile", - "ctrl-pagedown": "settings_editor::FocusNextFile" - } + "ctrl-pagedown": "settings_editor::FocusNextFile", + }, }, { "context": "StashDiff > Editor", "bindings": { "ctrl-space": "git::ApplyCurrentStash", "ctrl-shift-space": "git::PopCurrentStash", - "ctrl-shift-backspace": "git::DropCurrentStash" - } + "ctrl-shift-backspace": "git::DropCurrentStash", + }, }, { "context": "SettingsWindow > 
NavigationMenu", @@ -1317,22 +1337,22 @@ "pageup": "settings_editor::FocusPreviousRootNavEntry", "pagedown": "settings_editor::FocusNextRootNavEntry", "home": "settings_editor::FocusFirstNavEntry", - "end": "settings_editor::FocusLastNavEntry" - } + "end": "settings_editor::FocusLastNavEntry", + }, }, { - "context": "Zeta2Feedback > Editor", + "context": "EditPredictionContext > Editor", "bindings": { - "enter": "editor::Newline", - "ctrl-enter up": "dev::Zeta2RatePredictionPositive", - "ctrl-enter down": "dev::Zeta2RatePredictionNegative" - } + "alt-left": "dev::EditPredictionContextGoBack", + "alt-right": "dev::EditPredictionContextGoForward", + }, }, { - "context": "Zeta2Context > Editor", + "context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)", + "use_key_equivalents": true, "bindings": { - "alt-left": "dev::Zeta2ContextGoBack", - "alt-right": "dev::Zeta2ContextGoForward" - } - } + "ctrl-shift-backspace": "branch_picker::DeleteBranch", + "ctrl-shift-i": "branch_picker::FilterRemotes", + }, + }, ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 9d23eeb8cde071e20e5d3e4d7f873b1f668501b2..fd2605a6ad99177c887d6f804ec2ac70724f16f8 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -47,11 +47,12 @@ "cmd-m": "zed::Minimize", "fn-f": "zed::ToggleFullScreen", "ctrl-cmd-f": "zed::ToggleFullScreen", - "ctrl-cmd-z": "edit_prediction::RateCompletions", + "ctrl-cmd-z": "edit_prediction::RatePredictions", "ctrl-cmd-i": "edit_prediction::ToggleMenu", "ctrl-cmd-l": "lsp_tool::ToggleMenu", - "cmd-alt-.": "project_panel::ToggleHideHidden" - } + "ctrl-cmd-c": "editor::DisplayCursorNames", + "ctrl-cmd-s": "workspace::ToggleWorktreeSecurity", + }, }, { "context": "Editor", @@ -148,8 +149,8 @@ "shift-f9": "editor::EditLogBreakpoint", "ctrl-f12": "editor::GoToDeclaration", "alt-ctrl-f12": "editor::GoToDeclarationSplit", - "ctrl-cmd-e": "editor::ToggleEditPrediction" - } + "ctrl-cmd-e": "editor::ToggleEditPrediction", + }, }, { "context": "Editor && mode == full", @@ -167,8 +168,8 @@ "cmd->": "agent::AddSelectionToThread", "cmd-<": "assistant::InsertIntoEditor", "cmd-alt-e": "editor::SelectEnclosingSymbol", - "alt-enter": "editor::OpenSelectionsInMultibuffer" - } + "alt-enter": "editor::OpenSelectionsInMultibuffer", + }, }, { "context": "Editor && multibuffer", @@ -177,23 +178,23 @@ "cmd-up": "editor::MoveToStartOfExcerpt", "cmd-down": "editor::MoveToStartOfNextExcerpt", "cmd-shift-up": "editor::SelectToStartOfExcerpt", - "cmd-shift-down": "editor::SelectToStartOfNextExcerpt" - } + "cmd-shift-down": "editor::SelectToStartOfNextExcerpt", + }, }, { "context": "Editor && mode == full && edit_prediction", "use_key_equivalents": true, "bindings": { "alt-tab": "editor::NextEditPrediction", - "alt-shift-tab": "editor::PreviousEditPrediction" - } + "alt-shift-tab": "editor::PreviousEditPrediction", + }, }, { "context": "Editor && !edit_prediction", "use_key_equivalents": true, "bindings": { - "alt-tab": "editor::ShowEditPrediction" - } + "alt-tab": "editor::ShowEditPrediction", + }, }, { "context": "Editor && mode == auto_height", @@ -201,23 +202,23 @@ "bindings": { "ctrl-enter": "editor::Newline", "shift-enter": "editor::Newline", - "ctrl-shift-enter": "editor::NewlineBelow" - } + "ctrl-shift-enter": "editor::NewlineBelow", + }, }, { "context": "Markdown", "use_key_equivalents": true, "bindings": { - "cmd-c": "markdown::Copy" - } + "cmd-c": "markdown::Copy", + }, }, { "context": "Editor && jupyter && !ContextEditor", 
"use_key_equivalents": true, "bindings": { "ctrl-shift-enter": "repl::Run", - "ctrl-alt-enter": "repl::RunInPlace" - } + "ctrl-alt-enter": "repl::RunInPlace", + }, }, { "context": "Editor && !agent_diff && !AgentPanel", @@ -226,8 +227,8 @@ "cmd-alt-z": "git::Restore", "cmd-alt-y": "git::ToggleStaged", "cmd-y": "git::StageAndNext", - "cmd-shift-y": "git::UnstageAndNext" - } + "cmd-shift-y": "git::UnstageAndNext", + }, }, { "context": "AgentDiff", @@ -236,8 +237,8 @@ "cmd-y": "agent::Keep", "cmd-n": "agent::Reject", "cmd-shift-y": "agent::KeepAll", - "cmd-shift-n": "agent::RejectAll" - } + "cmd-shift-n": "agent::RejectAll", + }, }, { "context": "Editor && editor_agent_diff", @@ -247,8 +248,8 @@ "cmd-n": "agent::Reject", "cmd-shift-y": "agent::KeepAll", "cmd-shift-n": "agent::RejectAll", - "shift-ctrl-r": "agent::OpenAgentDiff" - } + "shift-ctrl-r": "agent::OpenAgentDiff", + }, }, { "context": "ContextEditor > Editor", @@ -264,8 +265,9 @@ "cmd-k c": "assistant::CopyCode", "cmd-g": "search::SelectNextMatch", "cmd-shift-g": "search::SelectPreviousMatch", - "cmd-k l": "agent::OpenRulesLibrary" - } + "cmd-k l": "agent::OpenRulesLibrary", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AgentPanel", @@ -279,50 +281,49 @@ "cmd-alt-p": "agent::ManageProfiles", "cmd-i": "agent::ToggleProfileSelector", "cmd-alt-/": "agent::ToggleModelSelector", - "cmd-shift-a": "agent::ToggleContextPicker", "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-alt-m": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", "cmd->": "agent::AddSelectionToThread", - "cmd-alt-e": "agent::RemoveAllContext", "cmd-shift-e": "project_panel::ToggleFocus", "cmd-ctrl-b": "agent::ToggleBurnMode", "cmd-shift-enter": "agent::ContinueThread", "alt-enter": "agent::ContinueWithBurnMode", "cmd-y": "agent::AllowOnce", "cmd-alt-y": "agent::AllowAlways", - "cmd-alt-z": "agent::RejectOnce" - } + "cmd-alt-z": "agent::RejectOnce", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AgentPanel > NavigationMenu", "bindings": { - "shift-backspace": "agent::DeleteRecentlyOpenThread" - } + "shift-backspace": "agent::DeleteRecentlyOpenThread", + }, }, { "context": "AgentPanel > Markdown", "use_key_equivalents": true, "bindings": { - "cmd-c": "markdown::CopyAsMarkdown" - } + "cmd-c": "markdown::CopyAsMarkdown", + }, }, { "context": "AgentPanel && text_thread", "use_key_equivalents": true, "bindings": { "cmd-n": "agent::NewTextThread", - "cmd-alt-t": "agent::NewThread" - } + "cmd-alt-n": "agent::NewExternalAgentThread", + }, }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "cmd-n": "agent::NewExternalAgentThread", - "cmd-alt-t": "agent::NewThread" - } + "cmd-alt-t": "agent::NewThread", + }, }, { "context": "MessageEditor && !Picker > Editor && !use_modifier_to_send", @@ -333,8 +334,8 @@ "cmd-i": "agent::ToggleProfileSelector", "shift-ctrl-r": "agent::OpenAgentDiff", "cmd-shift-y": "agent::KeepAll", - "cmd-shift-n": "agent::RejectAll" - } + "cmd-shift-n": "agent::RejectAll", + }, }, { "context": "MessageEditor && !Picker > Editor && use_modifier_to_send", @@ -345,8 +346,8 @@ "cmd-i": "agent::ToggleProfileSelector", "shift-ctrl-r": "agent::OpenAgentDiff", "cmd-shift-y": "agent::KeepAll", - "cmd-shift-n": "agent::RejectAll" - } + "cmd-shift-n": "agent::RejectAll", + }, }, { "context": "EditMessageEditor > Editor", @@ -354,8 +355,8 @@ "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": 
"editor::Newline" - } + "alt-enter": "editor::Newline", + }, }, { "context": "AgentFeedbackMessageEditor > Editor", @@ -363,32 +364,20 @@ "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": "editor::Newline" - } - }, - { - "context": "ContextStrip", - "use_key_equivalents": true, - "bindings": { - "up": "agent::FocusUp", - "right": "agent::FocusRight", - "left": "agent::FocusLeft", - "down": "agent::FocusDown", - "backspace": "agent::RemoveFocusedContext", - "enter": "agent::AcceptSuggestedContext" - } + "alt-enter": "editor::Newline", + }, }, { "context": "AgentConfiguration", "bindings": { - "ctrl--": "pane::GoBack" - } + "ctrl--": "pane::GoBack", + }, }, { "context": "AcpThread > ModeSelector", "bindings": { - "cmd-enter": "menu::Confirm" - } + "cmd-enter": "menu::Confirm", + }, }, { "context": "AcpThread > Editor && !use_modifier_to_send", @@ -398,8 +387,9 @@ "shift-ctrl-r": "agent::OpenAgentDiff", "cmd-shift-y": "agent::KeepAll", "cmd-shift-n": "agent::RejectAll", - "shift-tab": "agent::CycleModeSelector" - } + "shift-tab": "agent::CycleModeSelector", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AcpThread > Editor && use_modifier_to_send", @@ -409,20 +399,21 @@ "shift-ctrl-r": "agent::OpenAgentDiff", "cmd-shift-y": "agent::KeepAll", "cmd-shift-n": "agent::RejectAll", - "shift-tab": "agent::CycleModeSelector" - } + "shift-tab": "agent::CycleModeSelector", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "ThreadHistory", "bindings": { - "ctrl--": "pane::GoBack" - } + "ctrl--": "pane::GoBack", + }, }, { "context": "ThreadHistory > Editor", "bindings": { - "shift-backspace": "agent::RemoveSelectedThread" - } + "shift-backspace": "agent::RemoveSelectedThread", + }, }, { "context": "RulesLibrary", @@ -430,8 +421,8 @@ "bindings": { "cmd-n": "rules_library::NewRule", "cmd-shift-s": "rules_library::ToggleDefaultRule", - "cmd-w": "workspace::CloseWindow" - } + "cmd-w": "workspace::CloseWindow", + }, }, { "context": "BufferSearchBar", @@ -445,24 +436,24 @@ "cmd-f": "search::FocusSearch", "cmd-alt-f": "search::ToggleReplace", "cmd-alt-l": "search::ToggleSelection", - "cmd-shift-o": "outline::Toggle" - } + "cmd-shift-o": "outline::Toggle", + }, }, { "context": "BufferSearchBar && in_replace > Editor", "use_key_equivalents": true, "bindings": { "enter": "search::ReplaceNext", - "cmd-enter": "search::ReplaceAll" - } + "cmd-enter": "search::ReplaceAll", + }, }, { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar", @@ -474,24 +465,24 @@ "cmd-shift-f": "search::FocusSearch", "cmd-shift-h": "search::ToggleReplace", "alt-cmd-g": "search::ToggleRegex", - "alt-cmd-x": "search::ToggleRegex" - } + "alt-cmd-x": "search::ToggleRegex", + }, }, { "context": "ProjectSearchBar > Editor", "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar && in_replace > Editor", "use_key_equivalents": true, "bindings": { "enter": "search::ReplaceNext", - "cmd-enter": "search::ReplaceAll" - } + "cmd-enter": "search::ReplaceAll", + }, }, { "context": "ProjectSearchView", @@ -502,8 +493,8 @@ "shift-enter": "project_search::ToggleAllSearchResults", "cmd-shift-h": "search::ToggleReplace", "alt-cmd-g": "search::ToggleRegex", - 
"alt-cmd-x": "search::ToggleRegex" - } + "alt-cmd-x": "search::ToggleRegex", + }, }, { "context": "Pane", @@ -533,8 +524,8 @@ "alt-cmd-w": "search::ToggleWholeWord", "alt-cmd-f": "project_search::ToggleFilters", "alt-cmd-x": "search::ToggleRegex", - "cmd-k shift-enter": "pane::TogglePinTab" - } + "cmd-k shift-enter": "pane::TogglePinTab", + }, }, // Bindings from VS Code { @@ -605,24 +596,23 @@ "cmd-k r": "editor::RevealInFileManager", "cmd-k p": "editor::CopyPath", "cmd-\\": "pane::SplitRight", - "ctrl-cmd-c": "editor::DisplayCursorNames" - } + }, }, { "context": "Editor && extension == md", "use_key_equivalents": true, "bindings": { "cmd-k v": "markdown::OpenPreviewToTheSide", - "cmd-shift-v": "markdown::OpenPreview" - } + "cmd-shift-v": "markdown::OpenPreview", + }, }, { "context": "Editor && extension == svg", "use_key_equivalents": true, "bindings": { "cmd-k v": "svg::OpenPreviewToTheSide", - "cmd-shift-v": "svg::OpenPreview" - } + "cmd-shift-v": "svg::OpenPreview", + }, }, { "context": "Editor && mode == full", @@ -631,8 +621,8 @@ "cmd-shift-o": "outline::Toggle", "ctrl-g": "go_to_line::Toggle", "cmd-shift-backspace": "editor::GoToPreviousChange", - "cmd-shift-alt-backspace": "editor::GoToNextChange" - } + "cmd-shift-alt-backspace": "editor::GoToNextChange", + }, }, { "context": "Pane", @@ -650,8 +640,8 @@ "ctrl-0": "pane::ActivateLastItem", "ctrl--": "pane::GoBack", "ctrl-_": "pane::GoForward", - "cmd-shift-f": "pane::DeploySearch" - } + "cmd-shift-f": "pane::DeploySearch", + }, }, { "context": "Workspace", @@ -699,8 +689,8 @@ "ctrl-alt-cmd-p": "settings_profile_selector::Toggle", "cmd-t": "project_symbols::Toggle", "cmd-p": "file_finder::Toggle", - "ctrl-tab": "tab_switcher::Toggle", "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }], + "ctrl-tab": "tab_switcher::Toggle", "cmd-shift-p": "command_palette::Toggle", "cmd-shift-m": "diagnostics::Deploy", "cmd-shift-e": "project_panel::ToggleFocus", @@ -722,8 +712,8 @@ "cmd-k shift-down": "workspace::SwapPaneDown", "cmd-shift-x": "zed::Extensions", "f5": "debugger::Rerun", - "cmd-w": "workspace::CloseActiveDock" - } + "cmd-w": "workspace::CloseActiveDock", + }, }, { "context": "Workspace && !Terminal", @@ -734,26 +724,27 @@ // All task parameters are captured and unchanged between reruns by default. // Use the `"reevaluate_context"` parameter to control this. 
"cmd-alt-r": ["task::Rerun", { "reevaluate_context": false }], - "ctrl-alt-shift-r": ["task::Spawn", { "reveal_target": "center" }] + "ctrl-alt-shift-r": ["task::Spawn", { "reveal_target": "center" }], // also possible to spawn tasks by name: // "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }] // or by tag: // "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }], - } + }, }, { "context": "Workspace && debugger_running", "use_key_equivalents": true, "bindings": { - "f5": "zed::NoAction" - } + "f5": "zed::NoAction", + "f11": "debugger::StepInto", + }, }, { "context": "Workspace && debugger_stopped", "use_key_equivalents": true, "bindings": { - "f5": "debugger::Continue" - } + "f5": "debugger::Continue", + }, }, // Bindings from Sublime Text { @@ -774,8 +765,8 @@ "ctrl-alt-shift-left": "editor::SelectToPreviousSubwordStart", "ctrl-alt-shift-b": "editor::SelectToPreviousSubwordStart", "ctrl-alt-shift-right": "editor::SelectToNextSubwordEnd", - "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd" - } + "ctrl-alt-shift-f": "editor::SelectToNextSubwordEnd", + }, }, // Bindings from Atom { @@ -785,16 +776,16 @@ "cmd-k up": "pane::SplitUp", "cmd-k down": "pane::SplitDown", "cmd-k left": "pane::SplitLeft", - "cmd-k right": "pane::SplitRight" - } + "cmd-k right": "pane::SplitRight", + }, }, // Bindings that should be unified with bindings for more general actions { "context": "Editor && renaming", "use_key_equivalents": true, "bindings": { - "enter": "editor::ConfirmRename" - } + "enter": "editor::ConfirmRename", + }, }, { "context": "Editor && showing_completions", @@ -802,45 +793,47 @@ "bindings": { "enter": "editor::ConfirmCompletion", "shift-enter": "editor::ConfirmCompletionReplace", - "tab": "editor::ComposeCompletion" - } + "tab": "editor::ComposeCompletion", + }, }, { "context": "Editor && in_snippet && has_next_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "tab": "editor::NextSnippetTabstop" - } + "tab": "editor::NextSnippetTabstop", + }, }, { "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "shift-tab": "editor::PreviousSnippetTabstop" - } + "shift-tab": "editor::PreviousSnippetTabstop", + }, }, { "context": "Editor && edit_prediction", "bindings": { "alt-tab": "editor::AcceptEditPrediction", "tab": "editor::AcceptEditPrediction", - "ctrl-cmd-right": "editor::AcceptPartialEditPrediction" - } + "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", + "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && edit_prediction_conflict", "use_key_equivalents": true, "bindings": { "alt-tab": "editor::AcceptEditPrediction", - "ctrl-cmd-right": "editor::AcceptPartialEditPrediction" - } + "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", + "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && showing_code_actions", "use_key_equivalents": true, "bindings": { - "enter": "editor::ConfirmCodeAction" - } + "enter": "editor::ConfirmCodeAction", + }, }, { "context": "Editor && (showing_code_actions || showing_completions)", @@ -851,15 +844,15 @@ "down": "editor::ContextMenuNext", "ctrl-n": "editor::ContextMenuNext", "pageup": "editor::ContextMenuFirst", - "pagedown": "editor::ContextMenuLast" - } + "pagedown": "editor::ContextMenuLast", + }, }, { "context": "Editor && showing_signature_help && !showing_completions", "bindings": { "up": "editor::SignatureHelpPrevious", - "down": 
"editor::SignatureHelpNext" - } + "down": "editor::SignatureHelpNext", + }, }, // Custom bindings { @@ -869,8 +862,8 @@ // TODO: Move this to a dock open action "cmd-shift-c": "collab_panel::ToggleFocus", // Only available in debug builds: opens an element inspector for development. - "cmd-alt-i": "dev::ToggleInspector" - } + "cmd-alt-i": "dev::ToggleInspector", + }, }, { "context": "!ContextEditor > Editor && mode == full", @@ -883,19 +876,20 @@ "cmd-f8": "editor::GoToHunk", "cmd-shift-f8": "editor::GoToPreviousHunk", "ctrl-enter": "assistant::InlineAssist", - "ctrl-:": "editor::ToggleInlayHints" - } + "ctrl-:": "editor::ToggleInlayHints", + }, }, { "context": "PromptEditor", "use_key_equivalents": true, "bindings": { - "cmd-shift-a": "agent::ToggleContextPicker", "cmd-alt-/": "agent::ToggleModelSelector", - "cmd-alt-e": "agent::RemoveAllContext", + "alt-tab": "agent::CycleFavoriteModels", "ctrl-[": "agent::CyclePreviousInlineAssist", - "ctrl-]": "agent::CycleNextInlineAssist" - } + "ctrl-]": "agent::CycleNextInlineAssist", + "cmd-shift-enter": "inline_assistant::ThumbsUpResult", + "cmd-shift-backspace": "inline_assistant::ThumbsDownResult", + }, }, { "context": "Prompt", @@ -904,15 +898,15 @@ "left": "menu::SelectPrevious", "right": "menu::SelectNext", "h": "menu::SelectPrevious", - "l": "menu::SelectNext" - } + "l": "menu::SelectNext", + }, }, { "context": "ProjectSearchBar && !in_replace", "use_key_equivalents": true, "bindings": { - "cmd-enter": "project_search::SearchInNew" - } + "cmd-enter": "project_search::SearchInNew", + }, }, { "context": "OutlinePanel && not_editing", @@ -928,14 +922,15 @@ "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", "alt-enter": "editor::OpenExcerpts", - "cmd-alt-enter": "editor::OpenExcerptsSplit" - } + "cmd-alt-enter": "editor::OpenExcerptsSplit", + }, }, { "context": "ProjectPanel", "use_key_equivalents": true, "bindings": { "left": "project_panel::CollapseSelectedEntry", + "cmd-left": "project_panel::CollapseAllEntries", "right": "project_panel::ExpandSelectedEntry", "cmd-n": "project_panel::NewFile", "cmd-d": "project_panel::Duplicate", @@ -958,15 +953,15 @@ "cmd-alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ProjectPanel && not_editing", "use_key_equivalents": true, "bindings": { - "space": "project_panel::Open" - } + "space": "project_panel::Open", + }, }, { "context": "VariableList", @@ -979,17 +974,19 @@ "cmd-alt-c": "variable_list::CopyVariableName", "delete": "variable_list::RemoveWatch", "backspace": "variable_list::RemoveWatch", - "alt-enter": "variable_list::AddWatch" - } + "alt-enter": "variable_list::AddWatch", + }, }, { "context": "GitPanel && ChangesList", "use_key_equivalents": true, "bindings": { - "up": "menu::SelectPrevious", - "down": "menu::SelectNext", - "cmd-up": "menu::SelectFirst", - "cmd-down": "menu::SelectLast", + "up": "git_panel::PreviousEntry", + "down": "git_panel::NextEntry", + "cmd-up": "git_panel::FirstEntry", + "cmd-down": "git_panel::LastEntry", + "left": "git_panel::CollapseSelectedEntry", + "right": "git_panel::ExpandSelectedEntry", "enter": "menu::Confirm", "cmd-alt-y": "git::ToggleStaged", "space": "git::ToggleStaged", @@ -1003,15 +1000,15 @@ "backspace": ["git::RestoreFile", { "skip_prompt": false }], "delete": ["git::RestoreFile", { "skip_prompt": false }], "cmd-backspace": ["git::RestoreFile", { "skip_prompt": true }], - "cmd-delete": 
["git::RestoreFile", { "skip_prompt": true }] - } + "cmd-delete": ["git::RestoreFile", { "skip_prompt": true }], + }, }, { "context": "GitPanel && CommitEditor", "use_key_equivalents": true, "bindings": { - "escape": "git::Cancel" - } + "escape": "git::Cancel", + }, }, { "context": "GitDiff > Editor", @@ -1020,8 +1017,8 @@ "cmd-enter": "git::Commit", "cmd-shift-enter": "git::Amend", "cmd-ctrl-y": "git::StageAll", - "cmd-ctrl-shift-y": "git::UnstageAll" - } + "cmd-ctrl-shift-y": "git::UnstageAll", + }, }, { "context": "CommitEditor > Editor", @@ -1034,8 +1031,8 @@ "shift-tab": "git_panel::FocusChanges", "alt-up": "git_panel::FocusChanges", "shift-escape": "git::ExpandCommitEditor", - "alt-tab": "git::GenerateCommitMessage" - } + "alt-tab": "git::GenerateCommitMessage", + }, }, { "context": "GitPanel", @@ -1052,8 +1049,8 @@ "cmd-ctrl-y": "git::StageAll", "cmd-ctrl-shift-y": "git::UnstageAll", "cmd-enter": "git::Commit", - "cmd-shift-enter": "git::Amend" - } + "cmd-shift-enter": "git::Amend", + }, }, { "context": "GitCommit > Editor", @@ -1063,16 +1060,16 @@ "escape": "menu::Cancel", "cmd-enter": "git::Commit", "cmd-shift-enter": "git::Amend", - "alt-tab": "git::GenerateCommitMessage" - } + "alt-tab": "git::GenerateCommitMessage", + }, }, { "context": "DebugPanel", "bindings": { "cmd-t": "debugger::ToggleThreadPicker", "cmd-i": "debugger::ToggleSessionPicker", - "shift-alt-escape": "debugger::ToggleExpandItem" - } + "shift-alt-escape": "debugger::ToggleExpandItem", + }, }, { "context": "BreakpointList", @@ -1080,16 +1077,16 @@ "space": "debugger::ToggleEnableBreakpoint", "backspace": "debugger::UnsetBreakpoint", "left": "debugger::PreviousBreakpointProperty", - "right": "debugger::NextBreakpointProperty" - } + "right": "debugger::NextBreakpointProperty", + }, }, { "context": "CollabPanel && not_editing", "use_key_equivalents": true, "bindings": { "ctrl-backspace": "collab_panel::Remove", - "space": "menu::Confirm" - } + "space": "menu::Confirm", + }, }, { "context": "CollabPanel", @@ -1097,22 +1094,22 @@ "bindings": { "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", - "alt-enter": "collab_panel::OpenSelectedChannelNotes" - } + "alt-enter": "collab_panel::OpenSelectedChannelNotes", + }, }, { "context": "(CollabPanel && editing) > Editor", "use_key_equivalents": true, "bindings": { - "space": "collab_panel::InsertSpace" - } + "space": "collab_panel::InsertSpace", + }, }, { "context": "ChannelModal", "use_key_equivalents": true, "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "Picker > Editor", @@ -1123,30 +1120,30 @@ "down": "menu::SelectNext", "tab": "picker::ConfirmCompletion", "alt-enter": ["picker::ConfirmInput", { "secondary": false }], - "cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }] - } + "cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }], + }, }, { "context": "ChannelModal > Picker > Editor", "use_key_equivalents": true, "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "ToolchainSelector", "use_key_equivalents": true, "bindings": { - "cmd-shift-a": "toolchain::AddToolchain" - } + "cmd-shift-a": "toolchain::AddToolchain", + }, }, { "context": "FileFinder || (FileFinder > Picker > Editor)", "use_key_equivalents": true, "bindings": { "cmd-shift-a": "file_finder::ToggleSplitMenu", - "cmd-shift-i": "file_finder::ToggleFilterMenu" - } + "cmd-shift-i": "file_finder::ToggleFilterMenu", + }, }, { 
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)", @@ -1156,8 +1153,8 @@ "cmd-j": "pane::SplitDown", "cmd-k": "pane::SplitUp", "cmd-h": "pane::SplitLeft", - "cmd-l": "pane::SplitRight" - } + "cmd-l": "pane::SplitRight", + }, }, { "context": "TabSwitcher", @@ -1166,16 +1163,16 @@ "ctrl-shift-tab": "menu::SelectPrevious", "ctrl-up": "menu::SelectPrevious", "ctrl-down": "menu::SelectNext", - "ctrl-backspace": "tab_switcher::CloseSelectedItem" - } + "ctrl-backspace": "tab_switcher::CloseSelectedItem", + }, }, { "context": "StashList || (StashList > Picker > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-shift-backspace": "stash_picker::DropStashItem", - "ctrl-shift-v": "stash_picker::ShowStashItem" - } + "ctrl-shift-v": "stash_picker::ShowStashItem", + }, }, { "context": "Terminal", @@ -1230,35 +1227,35 @@ "ctrl-alt-left": "pane::SplitLeft", "ctrl-alt-right": "pane::SplitRight", "cmd-d": "pane::SplitRight", - "cmd-alt-r": "terminal::RerunTask" - } + "cmd-alt-r": "terminal::RerunTask", + }, }, { - "context": "RateCompletionModal", + "context": "RatePredictionsModal", "use_key_equivalents": true, "bindings": { - "cmd-shift-enter": "zeta::ThumbsUpActiveCompletion", - "cmd-shift-backspace": "zeta::ThumbsDownActiveCompletion", + "cmd-shift-enter": "zeta::ThumbsUpActivePrediction", + "cmd-shift-backspace": "zeta::ThumbsDownActivePrediction", "shift-down": "zeta::NextEdit", "shift-up": "zeta::PreviousEdit", - "right": "zeta::PreviewCompletion" - } + "right": "zeta::PreviewPrediction", + }, }, { - "context": "RateCompletionModal > Editor", + "context": "RatePredictionsModal > Editor", "use_key_equivalents": true, "bindings": { - "escape": "zeta::FocusCompletions", - "cmd-shift-enter": "zeta::ThumbsUpActiveCompletion", - "cmd-shift-backspace": "zeta::ThumbsDownActiveCompletion" - } + "escape": "zeta::FocusPredictions", + "cmd-shift-enter": "zeta::ThumbsUpActivePrediction", + "cmd-shift-backspace": "zeta::ThumbsDownActivePrediction", + }, }, { "context": "ZedPredictModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ConfigureContextServerModal > Editor", @@ -1266,52 +1263,56 @@ "bindings": { "escape": "menu::Cancel", "enter": "editor::Newline", - "cmd-enter": "menu::Confirm" - } + "cmd-enter": "menu::Confirm", + }, }, { "context": "ContextServerToolsModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "OnboardingAiConfigurationModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "Diagnostics", "use_key_equivalents": true, "bindings": { - "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh" - } + "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh", + }, }, { "context": "DebugConsole > Editor", "use_key_equivalents": true, "bindings": { "enter": "menu::Confirm", - "alt-enter": "console::WatchExpression" - } + "alt-enter": "console::WatchExpression", + }, }, { "context": "RunModal", "use_key_equivalents": true, "bindings": { "ctrl-tab": "pane::ActivateNextItem", - "ctrl-shift-tab": "pane::ActivatePreviousItem" - } + "ctrl-shift-tab": "pane::ActivatePreviousItem", + }, }, { "context": "MarkdownPreview", "bindings": { - "pageup": "markdown::MovePageUp", - "pagedown": "markdown::MovePageDown" - } + "pageup": "markdown::ScrollPageUp", + "pagedown": "markdown::ScrollPageDown", + "up": "markdown::ScrollUp", + "down": 
"markdown::ScrollDown", + "alt-up": "markdown::ScrollUpByItem", + "alt-down": "markdown::ScrollDownByItem", + }, }, { "context": "KeymapEditor", @@ -1324,8 +1325,8 @@ "alt-enter": "keymap_editor::CreateBinding", "cmd-c": "keymap_editor::CopyAction", "cmd-shift-c": "keymap_editor::CopyContext", - "cmd-t": "keymap_editor::ShowMatchingKeybinds" - } + "cmd-t": "keymap_editor::ShowMatchingKeybinds", + }, }, { "context": "KeystrokeInput", @@ -1333,48 +1334,67 @@ "bindings": { "enter": "keystroke_input::StartRecording", "escape escape escape": "keystroke_input::StopRecording", - "delete": "keystroke_input::ClearKeystrokes" - } + "delete": "keystroke_input::ClearKeystrokes", + }, }, { "context": "KeybindEditorModal", "use_key_equivalents": true, "bindings": { "cmd-enter": "menu::Confirm", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "KeybindEditorModal > Editor", "use_key_equivalents": true, "bindings": { "up": "menu::SelectPrevious", - "down": "menu::SelectNext" - } + "down": "menu::SelectNext", + }, }, { "context": "Onboarding", "use_key_equivalents": true, "bindings": { + "cmd-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "cmd-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "cmd--": ["zed::DecreaseUiFontSize", { "persist": false }], + "cmd-0": ["zed::ResetUiFontSize", { "persist": false }], "cmd-enter": "onboarding::Finish", "alt-tab": "onboarding::SignIn", - "alt-shift-a": "onboarding::OpenAccount" - } + "alt-shift-a": "onboarding::OpenAccount", + }, + }, + { + "context": "Welcome", + "use_key_equivalents": true, + "bindings": { + "cmd-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "cmd-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "cmd--": ["zed::DecreaseUiFontSize", { "persist": false }], + "cmd-0": ["zed::ResetUiFontSize", { "persist": false }], + "cmd-1": ["welcome::OpenRecentProject", 0], + "cmd-2": ["welcome::OpenRecentProject", 1], + "cmd-3": ["welcome::OpenRecentProject", 2], + "cmd-4": ["welcome::OpenRecentProject", 3], + "cmd-5": ["welcome::OpenRecentProject", 4], + }, }, { "context": "InvalidBuffer", "use_key_equivalents": true, "bindings": { - "ctrl-shift-enter": "workspace::OpenWithSystem" - } + "ctrl-shift-enter": "workspace::OpenWithSystem", + }, }, { "context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", - "ctrl-space": "git::WorktreeFromDefault" - } + "ctrl-space": "git::WorktreeFromDefault", + }, }, { "context": "SettingsWindow", @@ -1384,6 +1404,7 @@ "escape": "workspace::CloseWindow", "cmd-m": "settings_editor::Minimize", "cmd-f": "search::FocusSearch", + "cmd-,": "settings_editor::OpenCurrentFile", "left": "settings_editor::ToggleFocusNav", "cmd-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI @@ -1398,8 +1419,8 @@ "ctrl-9": ["settings_editor::FocusFile", 8], "ctrl-0": ["settings_editor::FocusFile", 9], "cmd-{": "settings_editor::FocusPreviousFile", - "cmd-}": "settings_editor::FocusNextFile" - } + "cmd-}": "settings_editor::FocusNextFile", + }, }, { "context": "StashDiff > Editor", @@ -1407,8 +1428,8 @@ "bindings": { "ctrl-space": "git::ApplyCurrentStash", "ctrl-shift-space": "git::PopCurrentStash", - "ctrl-shift-backspace": "git::DropCurrentStash" - } + "ctrl-shift-backspace": "git::DropCurrentStash", + }, }, { "context": "SettingsWindow > NavigationMenu", @@ -1423,22 +1444,22 @@ "pageup": 
"settings_editor::FocusPreviousRootNavEntry", "pagedown": "settings_editor::FocusNextRootNavEntry", "home": "settings_editor::FocusFirstNavEntry", - "end": "settings_editor::FocusLastNavEntry" - } + "end": "settings_editor::FocusLastNavEntry", + }, }, { - "context": "Zeta2Feedback > Editor", + "context": "EditPredictionContext > Editor", "bindings": { - "enter": "editor::Newline", - "cmd-enter up": "dev::Zeta2RatePredictionPositive", - "cmd-enter down": "dev::Zeta2RatePredictionNegative" - } + "alt-left": "dev::EditPredictionContextGoBack", + "alt-right": "dev::EditPredictionContextGoForward", + }, }, { - "context": "Zeta2Context > Editor", + "context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)", + "use_key_equivalents": true, "bindings": { - "alt-left": "dev::Zeta2ContextGoBack", - "alt-right": "dev::Zeta2ContextGoForward" - } - } + "cmd-shift-backspace": "branch_picker::DeleteBranch", + "cmd-shift-i": "branch_picker::FilterRemotes", + }, + }, ] diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 3fe5778e5c1219ee2b5fc9691ac876ec61debe06..4a700e2c9190a8ae23ed53edaa075703fa07b855 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -24,7 +24,8 @@ "ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }], "ctrl-shift-w": "workspace::CloseWindow", "shift-escape": "workspace::ToggleZoom", - "ctrl-o": "workspace::Open", + "ctrl-o": "workspace::OpenFiles", + "ctrl-k ctrl-o": "workspace::Open", "ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], @@ -36,22 +37,22 @@ "shift-f5": "debugger::Stop", "ctrl-shift-f5": "debugger::RerunSession", "f6": "debugger::Pause", - "f7": "debugger::StepOver", - "ctrl-f11": "debugger::StepInto", + "f10": "debugger::StepOver", "shift-f11": "debugger::StepOut", "f11": "zed::ToggleFullScreen", "ctrl-shift-i": "edit_prediction::ToggleMenu", "shift-alt-l": "lsp_tool::ToggleMenu", - "ctrl-alt-.": "project_panel::ToggleHideHidden" - } + "ctrl-shift-alt-c": "editor::DisplayCursorNames", + "ctrl-shift-alt-s": "workspace::ToggleWorktreeSecurity", + }, }, { "context": "Picker || menu", "use_key_equivalents": true, "bindings": { "up": "menu::SelectPrevious", - "down": "menu::SelectNext" - } + "down": "menu::SelectNext", + }, }, { "context": "Editor", @@ -63,7 +64,6 @@ "delete": "editor::Delete", "tab": "editor::Tab", "shift-tab": "editor::Backtab", - "ctrl-k": "editor::CutToEndOfLine", "ctrl-k ctrl-q": "editor::Rewrap", "ctrl-k q": "editor::Rewrap", "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }], @@ -118,10 +118,10 @@ "alt-g m": "git::OpenModifiedFiles", "menu": "editor::OpenContextMenu", "shift-f10": "editor::OpenContextMenu", - "ctrl-shift-e": "editor::ToggleEditPrediction", + "ctrl-alt-e": "editor::ToggleEditPrediction", "f9": "editor::ToggleBreakpoint", - "shift-f9": "editor::EditLogBreakpoint" - } + "shift-f9": "editor::EditLogBreakpoint", + }, }, { "context": "Editor && mode == full", @@ -140,23 +140,23 @@ "shift-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-alt-backspace": "editor::GoToNextChange", - "alt-enter": "editor::OpenSelectionsInMultibuffer" - } + "alt-enter": "editor::OpenSelectionsInMultibuffer", + }, }, { "context": "Editor && mode == full && edit_prediction", "use_key_equivalents": 
true, "bindings": { "alt-]": "editor::NextEditPrediction", - "alt-[": "editor::PreviousEditPrediction" - } + "alt-[": "editor::PreviousEditPrediction", + }, }, { "context": "Editor && !edit_prediction", "use_key_equivalents": true, "bindings": { - "alt-\\": "editor::ShowEditPrediction" - } + "alt-\\": "editor::ShowEditPrediction", + }, }, { "context": "Editor && mode == auto_height", @@ -164,23 +164,23 @@ "bindings": { "ctrl-enter": "editor::Newline", "shift-enter": "editor::Newline", - "ctrl-shift-enter": "editor::NewlineBelow" - } + "ctrl-shift-enter": "editor::NewlineBelow", + }, }, { "context": "Markdown", "use_key_equivalents": true, "bindings": { - "ctrl-c": "markdown::Copy" - } + "ctrl-c": "markdown::Copy", + }, }, { "context": "Editor && jupyter && !ContextEditor", "use_key_equivalents": true, "bindings": { "ctrl-shift-enter": "repl::Run", - "ctrl-alt-enter": "repl::RunInPlace" - } + "ctrl-alt-enter": "repl::RunInPlace", + }, }, { "context": "Editor && !agent_diff", @@ -188,8 +188,8 @@ "bindings": { "ctrl-k ctrl-r": "git::Restore", "alt-y": "git::StageAndNext", - "shift-alt-y": "git::UnstageAndNext" - } + "shift-alt-y": "git::UnstageAndNext", + }, }, { "context": "Editor && editor_agent_diff", @@ -199,8 +199,8 @@ "ctrl-n": "agent::Reject", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll", - "ctrl-shift-r": "agent::OpenAgentDiff" - } + "ctrl-shift-r": "agent::OpenAgentDiff", + }, }, { "context": "AgentDiff", @@ -209,14 +209,14 @@ "ctrl-y": "agent::Keep", "ctrl-n": "agent::Reject", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "ContextEditor > Editor", "use_key_equivalents": true, "bindings": { - "ctrl-enter": "assistant::Assist", + "ctrl-i": "assistant::Assist", "ctrl-s": "workspace::Save", "ctrl-shift-,": "assistant::InsertIntoEditor", "shift-enter": "assistant::Split", @@ -226,8 +226,8 @@ "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPreviousMatch", - "ctrl-k l": "agent::OpenRulesLibrary" - } + "ctrl-k l": "agent::OpenRulesLibrary", + }, }, { "context": "AgentPanel", @@ -241,51 +241,50 @@ "shift-alt-p": "agent::ManageProfiles", "ctrl-i": "agent::ToggleProfileSelector", "shift-alt-/": "agent::ToggleModelSelector", - "ctrl-shift-a": "agent::ToggleContextPicker", - "ctrl-shift-j": "agent::ToggleNavigationMenu", - "ctrl-alt-i": "agent::ToggleOptionsMenu", - // "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", + "shift-alt-j": "agent::ToggleNavigationMenu", + "shift-alt-i": "agent::ToggleOptionsMenu", + "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl-shift-.": "agent::AddSelectionToThread", - "shift-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", "super-ctrl-b": "agent::ToggleBurnMode", "alt-enter": "agent::ContinueWithBurnMode", - "ctrl-y": "agent::AllowOnce", + "shift-alt-a": "agent::AllowOnce", "ctrl-alt-y": "agent::AllowAlways", - "ctrl-alt-z": "agent::RejectOnce" - } + "shift-alt-z": "agent::RejectOnce", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AgentPanel > NavigationMenu", "use_key_equivalents": true, "bindings": { - "shift-backspace": "agent::DeleteRecentlyOpenThread" - } + "shift-backspace": "agent::DeleteRecentlyOpenThread", + }, }, { "context": "AgentPanel > Markdown", "use_key_equivalents": true, "bindings": { - "ctrl-c": "markdown::CopyAsMarkdown" - } + 
"ctrl-c": "markdown::CopyAsMarkdown", + }, }, { "context": "AgentPanel && text_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewTextThread", - "ctrl-alt-t": "agent::NewThread" - } + "ctrl-alt-t": "agent::NewThread", + }, }, { "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewExternalAgentThread", - "ctrl-alt-t": "agent::NewThread" - } + "ctrl-alt-t": "agent::NewThread", + }, }, { "context": "MessageEditor && !Picker > Editor && !use_modifier_to_send", @@ -296,8 +295,8 @@ "ctrl-i": "agent::ToggleProfileSelector", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "MessageEditor && !Picker > Editor && use_modifier_to_send", @@ -308,8 +307,8 @@ "ctrl-i": "agent::ToggleProfileSelector", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", - "ctrl-shift-n": "agent::RejectAll" - } + "ctrl-shift-n": "agent::RejectAll", + }, }, { "context": "EditMessageEditor > Editor", @@ -317,8 +316,8 @@ "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": "editor::Newline" - } + "alt-enter": "editor::Newline", + }, }, { "context": "AgentFeedbackMessageEditor > Editor", @@ -326,26 +325,14 @@ "bindings": { "escape": "menu::Cancel", "enter": "menu::Confirm", - "alt-enter": "editor::Newline" - } - }, - { - "context": "ContextStrip", - "use_key_equivalents": true, - "bindings": { - "up": "agent::FocusUp", - "right": "agent::FocusRight", - "left": "agent::FocusLeft", - "down": "agent::FocusDown", - "backspace": "agent::RemoveFocusedContext", - "enter": "agent::AcceptSuggestedContext" - } + "alt-enter": "editor::Newline", + }, }, { "context": "AcpThread > ModeSelector", "bindings": { - "ctrl-enter": "menu::Confirm" - } + "ctrl-enter": "menu::Confirm", + }, }, { "context": "AcpThread > Editor && !use_modifier_to_send", @@ -355,8 +342,9 @@ "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll", - "shift-tab": "agent::CycleModeSelector" - } + "shift-tab": "agent::CycleModeSelector", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "AcpThread > Editor && use_modifier_to_send", @@ -366,15 +354,16 @@ "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-y": "agent::KeepAll", "ctrl-shift-n": "agent::RejectAll", - "shift-tab": "agent::CycleModeSelector" - } + "shift-tab": "agent::CycleModeSelector", + "alt-tab": "agent::CycleFavoriteModels", + }, }, { "context": "ThreadHistory", "use_key_equivalents": true, "bindings": { - "backspace": "agent::RemoveSelectedThread" - } + "backspace": "agent::RemoveSelectedThread", + }, }, { "context": "RulesLibrary", @@ -382,8 +371,8 @@ "bindings": { "ctrl-n": "rules_library::NewRule", "ctrl-shift-s": "rules_library::ToggleDefaultRule", - "ctrl-w": "workspace::CloseWindow" - } + "ctrl-w": "workspace::CloseWindow", + }, }, { "context": "BufferSearchBar", @@ -396,24 +385,24 @@ "alt-enter": "search::SelectAllMatches", "ctrl-f": "search::FocusSearch", "ctrl-h": "search::ToggleReplace", - "ctrl-l": "search::ToggleSelection" - } + "ctrl-l": "search::ToggleSelection", + }, }, { "context": "BufferSearchBar && in_replace > Editor", "use_key_equivalents": true, "bindings": { "enter": "search::ReplaceNext", - "ctrl-enter": "search::ReplaceAll" - } + "ctrl-enter": "search::ReplaceAll", + }, }, { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, "bindings": { 
"up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar", @@ -422,24 +411,24 @@ "escape": "project_search::ToggleFocus", "ctrl-shift-f": "search::FocusSearch", "ctrl-shift-h": "search::ToggleReplace", - "alt-r": "search::ToggleRegex" // vscode - } + "alt-r": "search::ToggleRegex", // vscode + }, }, { "context": "ProjectSearchBar > Editor", "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", - "down": "search::NextHistoryQuery" - } + "down": "search::NextHistoryQuery", + }, }, { "context": "ProjectSearchBar && in_replace > Editor", "use_key_equivalents": true, "bindings": { "enter": "search::ReplaceNext", - "ctrl-alt-enter": "search::ReplaceAll" - } + "ctrl-alt-enter": "search::ReplaceAll", + }, }, { "context": "ProjectSearchView", @@ -447,8 +436,8 @@ "bindings": { "escape": "project_search::ToggleFocus", "ctrl-shift-h": "search::ToggleReplace", - "alt-r": "search::ToggleRegex" // vscode - } + "alt-r": "search::ToggleRegex", // vscode + }, }, { "context": "Pane", @@ -479,8 +468,10 @@ "ctrl-k ctrl-w": "workspace::CloseAllItemsAndPanes", "back": "pane::GoBack", "alt--": "pane::GoBack", + "alt-left": "pane::GoBack", "forward": "pane::GoForward", "alt-=": "pane::GoForward", + "alt-right": "pane::GoForward", "f3": "search::SelectNextMatch", "shift-f3": "search::SelectPreviousMatch", "ctrl-shift-f": "project_search::ToggleFocus", @@ -493,8 +484,8 @@ "shift-enter": "project_search::ToggleAllSearchResults", "alt-r": "search::ToggleRegex", // "ctrl-shift-alt-x": "search::ToggleRegex", - "ctrl-k shift-enter": "pane::TogglePinTab" - } + "ctrl-k shift-enter": "pane::TogglePinTab", + }, }, // Bindings from VS Code { @@ -503,8 +494,8 @@ "bindings": { "ctrl-[": "editor::Outdent", "ctrl-]": "editor::Indent", - "ctrl-shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above - "ctrl-shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below + "ctrl-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above + "ctrl-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below "ctrl-shift-k": "editor::DeleteLine", "alt-up": "editor::MoveLineUp", "alt-down": "editor::MoveLineDown", @@ -515,24 +506,21 @@ "ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection "ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word "ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand - "ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch - "ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch + "ctrl-f3": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand "ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip - "ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch + "ctrl-shift-f3": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand "ctrl-k ctrl-i": "editor::Hover", "ctrl-k ctrl-b": "editor::BlameHover", + 
"ctrl-k ctrl-f": "editor::FormatSelections", "ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }], + "ctrl-k ctrl-c": ["editor::ToggleComments", { "advance_downwards": false }], "f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }], "shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }], "f2": "editor::Rename", "f12": "editor::GoToDefinition", "alt-f12": "editor::GoToDefinitionSplit", - "ctrl-shift-f10": "editor::GoToDefinitionSplit", "ctrl-f12": "editor::GoToImplementation", - "shift-f12": "editor::GoToTypeDefinition", - "ctrl-alt-f12": "editor::GoToTypeDefinitionSplit", "shift-alt-f12": "editor::FindAllReferences", - "ctrl-m": "editor::MoveToEnclosingBracket", // from jetbrains "ctrl-shift-\\": "editor::MoveToEnclosingBracket", "ctrl-shift-[": "editor::Fold", "ctrl-shift-]": "editor::UnfoldLines", @@ -556,34 +544,33 @@ "ctrl-k r": "editor::RevealInFileManager", "ctrl-k p": "editor::CopyPath", "ctrl-\\": "pane::SplitRight", - "ctrl-shift-alt-c": "editor::DisplayCursorNames", "alt-.": "editor::GoToHunk", - "alt-,": "editor::GoToPreviousHunk" - } + "alt-,": "editor::GoToPreviousHunk", + }, }, { "context": "Editor && extension == md", "use_key_equivalents": true, "bindings": { "ctrl-k v": "markdown::OpenPreviewToTheSide", - "ctrl-shift-v": "markdown::OpenPreview" - } + "ctrl-shift-v": "markdown::OpenPreview", + }, }, { "context": "Editor && extension == svg", "use_key_equivalents": true, "bindings": { "ctrl-k v": "svg::OpenPreviewToTheSide", - "ctrl-shift-v": "svg::OpenPreview" - } + "ctrl-shift-v": "svg::OpenPreview", + }, }, { "context": "Editor && mode == full", "use_key_equivalents": true, "bindings": { "ctrl-shift-o": "outline::Toggle", - "ctrl-g": "go_to_line::Toggle" - } + "ctrl-g": "go_to_line::Toggle", + }, }, { "context": "Workspace", @@ -631,8 +618,8 @@ "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", - "ctrl-tab": "tab_switcher::Toggle", "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }], + "ctrl-tab": "tab_switcher::Toggle", "ctrl-e": "file_finder::Toggle", "f1": "command_palette::Toggle", "ctrl-shift-p": "command_palette::Toggle", @@ -667,22 +654,22 @@ // "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }], "f5": "debugger::Rerun", "ctrl-f4": "workspace::CloseActiveDock", - "ctrl-w": "workspace::CloseActiveDock" - } + "ctrl-w": "workspace::CloseActiveDock", + }, }, { "context": "Workspace && debugger_running", "use_key_equivalents": true, "bindings": { - "f5": "zed::NoAction" - } + "f5": "zed::NoAction", + }, }, { "context": "Workspace && debugger_stopped", "use_key_equivalents": true, "bindings": { - "f5": "debugger::Continue" - } + "f5": "debugger::Continue", + }, }, { "context": "ApplicationMenu", @@ -690,8 +677,8 @@ "bindings": { "f10": "menu::Cancel", "left": "app_menu::ActivateMenuLeft", - "right": "app_menu::ActivateMenuRight" - } + "right": "app_menu::ActivateMenuRight", + }, }, // Bindings from Sublime Text { @@ -708,8 +695,8 @@ "ctrl-alt-left": "editor::MoveToPreviousSubwordStart", "ctrl-alt-right": "editor::MoveToNextSubwordEnd", "ctrl-shift-alt-left": "editor::SelectToPreviousSubwordStart", - "ctrl-shift-alt-right": "editor::SelectToNextSubwordEnd" - } + "ctrl-shift-alt-right": "editor::SelectToNextSubwordEnd", + }, }, // Bindings from Atom { @@ -719,16 +706,16 @@ "ctrl-k up": "pane::SplitUp", "ctrl-k down": "pane::SplitDown", "ctrl-k left": "pane::SplitLeft", - "ctrl-k right": 
"pane::SplitRight" - } + "ctrl-k right": "pane::SplitRight", + }, }, // Bindings that should be unified with bindings for more general actions { "context": "Editor && renaming", "use_key_equivalents": true, "bindings": { - "enter": "editor::ConfirmRename" - } + "enter": "editor::ConfirmRename", + }, }, { "context": "Editor && showing_completions", @@ -736,22 +723,22 @@ "bindings": { "enter": "editor::ConfirmCompletion", "shift-enter": "editor::ConfirmCompletionReplace", - "tab": "editor::ComposeCompletion" - } + "tab": "editor::ComposeCompletion", + }, }, { "context": "Editor && in_snippet && has_next_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "tab": "editor::NextSnippetTabstop" - } + "tab": "editor::NextSnippetTabstop", + }, }, { "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions", "use_key_equivalents": true, "bindings": { - "shift-tab": "editor::PreviousSnippetTabstop" - } + "shift-tab": "editor::PreviousSnippetTabstop", + }, }, // Bindings for accepting edit predictions // @@ -764,8 +751,9 @@ "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", "tab": "editor::AcceptEditPrediction", - "alt-right": "editor::AcceptPartialEditPrediction" - } + "alt-right": "editor::AcceptNextWordEditPrediction", + "alt-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && edit_prediction_conflict", @@ -773,15 +761,16 @@ "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "alt-right": "editor::AcceptPartialEditPrediction" - } + "alt-right": "editor::AcceptNextWordEditPrediction", + "alt-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Editor && showing_code_actions", "use_key_equivalents": true, "bindings": { - "enter": "editor::ConfirmCodeAction" - } + "enter": "editor::ConfirmCodeAction", + }, }, { "context": "Editor && (showing_code_actions || showing_completions)", @@ -792,16 +781,16 @@ "ctrl-n": "editor::ContextMenuNext", "down": "editor::ContextMenuNext", "pageup": "editor::ContextMenuFirst", - "pagedown": "editor::ContextMenuLast" - } + "pagedown": "editor::ContextMenuLast", + }, }, { "context": "Editor && showing_signature_help && !showing_completions", "use_key_equivalents": true, "bindings": { "up": "editor::SignatureHelpPrevious", - "down": "editor::SignatureHelpNext" - } + "down": "editor::SignatureHelpNext", + }, }, // Custom bindings { @@ -809,15 +798,15 @@ "bindings": { "ctrl-shift-alt-f": "workspace::FollowNextCollaborator", // Only available in debug builds: opens an element inspector for development. 
- "shift-alt-i": "dev::ToggleInspector" - } + "shift-alt-i": "dev::ToggleInspector", + }, }, { "context": "!Terminal", "use_key_equivalents": true, "bindings": { - "ctrl-shift-c": "collab_panel::ToggleFocus" - } + "ctrl-shift-c": "collab_panel::ToggleFocus", + }, }, { "context": "!ContextEditor > Editor && mode == full", @@ -830,8 +819,8 @@ "ctrl-f8": "editor::GoToHunk", "ctrl-shift-f8": "editor::GoToPreviousHunk", "ctrl-enter": "assistant::InlineAssist", - "ctrl-shift-;": "editor::ToggleInlayHints" - } + "ctrl-shift-;": "editor::ToggleInlayHints", + }, }, { "context": "PromptEditor", @@ -839,8 +828,9 @@ "bindings": { "ctrl-[": "agent::CyclePreviousInlineAssist", "ctrl-]": "agent::CycleNextInlineAssist", - "shift-alt-e": "agent::RemoveAllContext" - } + "ctrl-shift-enter": "inline_assistant::ThumbsUpResult", + "ctrl-shift-delete": "inline_assistant::ThumbsDownResult", + }, }, { "context": "Prompt", @@ -849,15 +839,15 @@ "left": "menu::SelectPrevious", "right": "menu::SelectNext", "h": "menu::SelectPrevious", - "l": "menu::SelectNext" - } + "l": "menu::SelectNext", + }, }, { "context": "ProjectSearchBar && !in_replace", "use_key_equivalents": true, "bindings": { - "ctrl-enter": "project_search::SearchInNew" - } + "ctrl-enter": "project_search::SearchInNew", + }, }, { "context": "OutlinePanel && not_editing", @@ -872,14 +862,15 @@ "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", "alt-enter": "editor::OpenExcerpts", - "ctrl-alt-enter": "editor::OpenExcerptsSplit" - } + "ctrl-alt-enter": "editor::OpenExcerptsSplit", + }, }, { "context": "ProjectPanel", "use_key_equivalents": true, "bindings": { "left": "project_panel::CollapseSelectedEntry", + "ctrl-left": "project_panel::CollapseAllEntries", "right": "project_panel::ExpandSelectedEntry", "ctrl-n": "project_panel::NewFile", "alt-n": "project_panel::NewDirectory", @@ -903,22 +894,24 @@ "ctrl-k ctrl-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ProjectPanel && not_editing", "use_key_equivalents": true, "bindings": { - "space": "project_panel::Open" - } + "space": "project_panel::Open", + }, }, { "context": "GitPanel && ChangesList", "use_key_equivalents": true, "bindings": { - "up": "menu::SelectPrevious", - "down": "menu::SelectNext", + "up": "git_panel::PreviousEntry", + "down": "git_panel::NextEntry", + "left": "git_panel::CollapseSelectedEntry", + "right": "git_panel::ExpandSelectedEntry", "enter": "menu::Confirm", "alt-y": "git::StageFile", "shift-alt-y": "git::UnstageFile", @@ -932,15 +925,15 @@ "backspace": ["git::RestoreFile", { "skip_prompt": false }], "shift-delete": ["git::RestoreFile", { "skip_prompt": false }], "ctrl-backspace": ["git::RestoreFile", { "skip_prompt": false }], - "ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }] - } + "ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }], + }, }, { "context": "GitPanel && CommitEditor", "use_key_equivalents": true, "bindings": { - "escape": "git::Cancel" - } + "escape": "git::Cancel", + }, }, { "context": "GitCommit > Editor", @@ -950,8 +943,8 @@ "enter": "editor::Newline", "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", - "alt-l": "git::GenerateCommitMessage" - } + "alt-l": "git::GenerateCommitMessage", + }, }, { "context": "GitPanel", @@ -968,8 +961,8 @@ "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", "ctrl-enter": "git::Commit", - "ctrl-shift-enter": 
"git::Amend" - } + "ctrl-shift-enter": "git::Amend", + }, }, { "context": "GitDiff > Editor", @@ -978,15 +971,15 @@ "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", - "ctrl-shift-space": "git::UnstageAll" - } + "ctrl-shift-space": "git::UnstageAll", + }, }, { "context": "AskPass > Editor", "use_key_equivalents": true, "bindings": { - "enter": "menu::Confirm" - } + "enter": "menu::Confirm", + }, }, { "context": "CommitEditor > Editor", @@ -999,8 +992,8 @@ "ctrl-enter": "git::Commit", "ctrl-shift-enter": "git::Amend", "alt-up": "git_panel::FocusChanges", - "alt-l": "git::GenerateCommitMessage" - } + "alt-l": "git::GenerateCommitMessage", + }, }, { "context": "DebugPanel", @@ -1008,8 +1001,8 @@ "bindings": { "ctrl-t": "debugger::ToggleThreadPicker", "ctrl-i": "debugger::ToggleSessionPicker", - "shift-alt-escape": "debugger::ToggleExpandItem" - } + "shift-alt-escape": "debugger::ToggleExpandItem", + }, }, { "context": "VariableList", @@ -1022,8 +1015,8 @@ "ctrl-alt-c": "variable_list::CopyVariableName", "delete": "variable_list::RemoveWatch", "backspace": "variable_list::RemoveWatch", - "alt-enter": "variable_list::AddWatch" - } + "alt-enter": "variable_list::AddWatch", + }, }, { "context": "BreakpointList", @@ -1032,16 +1025,16 @@ "space": "debugger::ToggleEnableBreakpoint", "backspace": "debugger::UnsetBreakpoint", "left": "debugger::PreviousBreakpointProperty", - "right": "debugger::NextBreakpointProperty" - } + "right": "debugger::NextBreakpointProperty", + }, }, { "context": "CollabPanel && not_editing", "use_key_equivalents": true, "bindings": { "ctrl-backspace": "collab_panel::Remove", - "space": "menu::Confirm" - } + "space": "menu::Confirm", + }, }, { "context": "CollabPanel", @@ -1049,22 +1042,22 @@ "bindings": { "alt-up": "collab_panel::MoveChannelUp", "alt-down": "collab_panel::MoveChannelDown", - "alt-enter": "collab_panel::OpenSelectedChannelNotes" - } + "alt-enter": "collab_panel::OpenSelectedChannelNotes", + }, }, { "context": "(CollabPanel && editing) > Editor", "use_key_equivalents": true, "bindings": { - "space": "collab_panel::InsertSpace" - } + "space": "collab_panel::InsertSpace", + }, }, { "context": "ChannelModal", "use_key_equivalents": true, "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "Picker > Editor", @@ -1074,22 +1067,22 @@ "up": "menu::SelectPrevious", "down": "menu::SelectNext", "tab": "picker::ConfirmCompletion", - "alt-enter": ["picker::ConfirmInput", { "secondary": false }] - } + "alt-enter": ["picker::ConfirmInput", { "secondary": false }], + }, }, { "context": "ChannelModal > Picker > Editor", "use_key_equivalents": true, "bindings": { - "tab": "channel_modal::ToggleMode" - } + "tab": "channel_modal::ToggleMode", + }, }, { "context": "ToolchainSelector", "use_key_equivalents": true, "bindings": { - "ctrl-shift-a": "toolchain::AddToolchain" - } + "ctrl-shift-a": "toolchain::AddToolchain", + }, }, { "context": "FileFinder || (FileFinder > Picker > Editor)", @@ -1097,8 +1090,8 @@ "bindings": { "ctrl-p": "file_finder::Toggle", "ctrl-shift-a": "file_finder::ToggleSplitMenu", - "ctrl-shift-i": "file_finder::ToggleFilterMenu" - } + "ctrl-shift-i": "file_finder::ToggleFilterMenu", + }, }, { "context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)", @@ -1108,8 +1101,8 @@ "ctrl-j": "pane::SplitDown", "ctrl-k": "pane::SplitUp", "ctrl-h": "pane::SplitLeft", - "ctrl-l": "pane::SplitRight" - } + "ctrl-l": "pane::SplitRight", + 
}, }, { "context": "TabSwitcher", @@ -1118,16 +1111,16 @@ "ctrl-shift-tab": "menu::SelectPrevious", "ctrl-up": "menu::SelectPrevious", "ctrl-down": "menu::SelectNext", - "ctrl-backspace": "tab_switcher::CloseSelectedItem" - } + "ctrl-backspace": "tab_switcher::CloseSelectedItem", + }, }, { "context": "StashList || (StashList > Picker > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-shift-backspace": "stash_picker::DropStashItem", - "ctrl-shift-v": "stash_picker::ShowStashItem" - } + "ctrl-shift-v": "stash_picker::ShowStashItem", + }, }, { "context": "Terminal", @@ -1139,7 +1132,7 @@ "shift-insert": "terminal::Paste", "ctrl-v": "terminal::Paste", "ctrl-shift-v": "terminal::Paste", - "ctrl-enter": "assistant::InlineAssist", + "ctrl-i": "assistant::InlineAssist", "alt-b": ["terminal::SendText", "\u001bb"], "alt-f": ["terminal::SendText", "\u001bf"], "alt-.": ["terminal::SendText", "\u001b."], @@ -1151,6 +1144,8 @@ "ctrl-e": ["terminal::SendKeystroke", "ctrl-e"], "ctrl-o": ["terminal::SendKeystroke", "ctrl-o"], "ctrl-w": ["terminal::SendKeystroke", "ctrl-w"], + "ctrl-q": ["terminal::SendKeystroke", "ctrl-q"], + "ctrl-r": ["terminal::SendKeystroke", "ctrl-r"], "ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"], "ctrl-shift-a": "editor::SelectAll", "ctrl-shift-f": "buffer_search::Deploy", @@ -1172,21 +1167,21 @@ "ctrl-shift-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask", "alt-t": "terminal::RerunTask", - "ctrl-shift-5": "pane::SplitRight" - } + "ctrl-shift-5": "pane::SplitRight", + }, }, { "context": "Terminal && selection", "bindings": { - "ctrl-c": "terminal::Copy" - } + "ctrl-c": "terminal::Copy", + }, }, { "context": "ZedPredictModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "ConfigureContextServerModal > Editor", @@ -1194,53 +1189,57 @@ "bindings": { "escape": "menu::Cancel", "enter": "editor::Newline", - "ctrl-enter": "menu::Confirm" - } + "ctrl-enter": "menu::Confirm", + }, }, { "context": "ContextServerToolsModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "OnboardingAiConfigurationModal", "use_key_equivalents": true, "bindings": { - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "Diagnostics", "use_key_equivalents": true, "bindings": { - "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh" - } + "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh", + }, }, { "context": "DebugConsole > Editor", "use_key_equivalents": true, "bindings": { "enter": "menu::Confirm", - "alt-enter": "console::WatchExpression" - } + "alt-enter": "console::WatchExpression", + }, }, { "context": "RunModal", "use_key_equivalents": true, "bindings": { "ctrl-tab": "pane::ActivateNextItem", - "ctrl-shift-tab": "pane::ActivatePreviousItem" - } + "ctrl-shift-tab": "pane::ActivatePreviousItem", + }, }, { "context": "MarkdownPreview", "use_key_equivalents": true, "bindings": { - "pageup": "markdown::MovePageUp", - "pagedown": "markdown::MovePageDown" - } + "pageup": "markdown::ScrollPageUp", + "pagedown": "markdown::ScrollPageDown", + "up": "markdown::ScrollUp", + "down": "markdown::ScrollDown", + "alt-up": "markdown::ScrollUpByItem", + "alt-down": "markdown::ScrollDownByItem", + }, }, { "context": "KeymapEditor", @@ -1253,8 +1252,8 @@ "alt-enter": "keymap_editor::CreateBinding", "ctrl-c": "keymap_editor::CopyAction", "ctrl-shift-c": "keymap_editor::CopyContext", - "ctrl-t": 
"keymap_editor::ShowMatchingKeybinds" - } + "ctrl-t": "keymap_editor::ShowMatchingKeybinds", + }, }, { "context": "KeystrokeInput", @@ -1262,41 +1261,60 @@ "bindings": { "enter": "keystroke_input::StartRecording", "escape escape escape": "keystroke_input::StopRecording", - "delete": "keystroke_input::ClearKeystrokes" - } + "delete": "keystroke_input::ClearKeystrokes", + }, }, { "context": "KeybindEditorModal", "use_key_equivalents": true, "bindings": { "ctrl-enter": "menu::Confirm", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "KeybindEditorModal > Editor", "use_key_equivalents": true, "bindings": { "up": "menu::SelectPrevious", - "down": "menu::SelectNext" - } + "down": "menu::SelectNext", + }, }, { "context": "Onboarding", "use_key_equivalents": true, "bindings": { + "ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }], + "ctrl-0": ["zed::ResetUiFontSize", { "persist": false }], "ctrl-enter": "onboarding::Finish", "alt-shift-l": "onboarding::SignIn", - "shift-alt-a": "onboarding::OpenAccount" - } + "shift-alt-a": "onboarding::OpenAccount", + }, + }, + { + "context": "Welcome", + "use_key_equivalents": true, + "bindings": { + "ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }], + "ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }], + "ctrl-0": ["zed::ResetUiFontSize", { "persist": false }], + "ctrl-1": ["welcome::OpenRecentProject", 0], + "ctrl-2": ["welcome::OpenRecentProject", 1], + "ctrl-3": ["welcome::OpenRecentProject", 2], + "ctrl-4": ["welcome::OpenRecentProject", 3], + "ctrl-5": ["welcome::OpenRecentProject", 4], + }, }, { "context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)", "use_key_equivalents": true, "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", - "ctrl-space": "git::WorktreeFromDefault" - } + "ctrl-space": "git::WorktreeFromDefault", + }, }, { "context": "SettingsWindow", @@ -1306,6 +1324,7 @@ "escape": "workspace::CloseWindow", "ctrl-m": "settings_editor::Minimize", "ctrl-f": "search::FocusSearch", + "ctrl-,": "settings_editor::OpenCurrentFile", "left": "settings_editor::ToggleFocusNav", "ctrl-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI @@ -1320,8 +1339,8 @@ "ctrl-9": ["settings_editor::FocusFile", 8], "ctrl-0": ["settings_editor::FocusFile", 9], "ctrl-pageup": "settings_editor::FocusPreviousFile", - "ctrl-pagedown": "settings_editor::FocusNextFile" - } + "ctrl-pagedown": "settings_editor::FocusNextFile", + }, }, { "context": "StashDiff > Editor", @@ -1329,8 +1348,8 @@ "bindings": { "ctrl-space": "git::ApplyCurrentStash", "ctrl-shift-space": "git::PopCurrentStash", - "ctrl-shift-backspace": "git::DropCurrentStash" - } + "ctrl-shift-backspace": "git::DropCurrentStash", + }, }, { "context": "SettingsWindow > NavigationMenu", @@ -1345,22 +1364,22 @@ "pageup": "settings_editor::FocusPreviousRootNavEntry", "pagedown": "settings_editor::FocusNextRootNavEntry", "home": "settings_editor::FocusFirstNavEntry", - "end": "settings_editor::FocusLastNavEntry" - } + "end": "settings_editor::FocusLastNavEntry", + }, }, { - "context": "Zeta2Feedback > Editor", + "context": "EditPredictionContext > Editor", "bindings": { - "enter": "editor::Newline", - "ctrl-enter up": "dev::Zeta2RatePredictionPositive", - "ctrl-enter down": 
"dev::Zeta2RatePredictionNegative" - } + "alt-left": "dev::EditPredictionContextGoBack", + "alt-right": "dev::EditPredictionContextGoForward", + }, }, { - "context": "Zeta2Context > Editor", + "context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)", + "use_key_equivalents": true, "bindings": { - "alt-left": "dev::Zeta2ContextGoBack", - "alt-right": "dev::Zeta2ContextGoForward" - } - } + "ctrl-shift-backspace": "branch_picker::DeleteBranch", + "ctrl-shift-i": "branch_picker::FilterRemotes", + }, + }, ] diff --git a/assets/keymaps/initial.json b/assets/keymaps/initial.json index 8e4fe59f44ea7346a51e1c064ffa0553315da3b9..3a8d7f382aa57b39efc22845a17a4ef1bfd240ef 100644 --- a/assets/keymaps/initial.json +++ b/assets/keymaps/initial.json @@ -10,12 +10,12 @@ "context": "Workspace", "bindings": { // "shift shift": "file_finder::Toggle" - } + }, }, { "context": "Editor && vim_mode == insert", "bindings": { // "j k": "vim::NormalBefore" - } - } + }, + }, ] diff --git a/assets/keymaps/linux/atom.json b/assets/keymaps/linux/atom.json index 98992b19fac72055807063edae8b7b23652062d3..a15d4877aab79ac2e570697137ba89e3572d074e 100644 --- a/assets/keymaps/linux/atom.json +++ b/assets/keymaps/linux/atom.json @@ -4,15 +4,15 @@ "bindings": { "ctrl-shift-f5": "workspace::Reload", // window:reload "ctrl-k ctrl-n": "workspace::ActivatePreviousPane", // window:focus-next-pane - "ctrl-k ctrl-p": "workspace::ActivateNextPane" // window:focus-previous-pane - } + "ctrl-k ctrl-p": "workspace::ActivateNextPane", // window:focus-previous-pane + }, }, { "context": "Editor", "bindings": { "ctrl-k ctrl-u": "editor::ConvertToUpperCase", // editor:upper-case - "ctrl-k ctrl-l": "editor::ConvertToLowerCase" // editor:lower-case - } + "ctrl-k ctrl-l": "editor::ConvertToLowerCase", // editor:lower-case + }, }, { "context": "Editor && mode == full", @@ -32,8 +32,8 @@ "ctrl-down": "editor::MoveLineDown", // editor:move-line-down "ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle "ctrl-shift-m": "markdown::OpenPreviewToTheSide", // markdown-preview:toggle - "ctrl-r": "outline::Toggle" // symbols-view:toggle-project-symbols - } + "ctrl-r": "outline::Toggle", // symbols-view:toggle-project-symbols + }, }, { "context": "BufferSearchBar", @@ -41,8 +41,8 @@ "f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next "shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous "ctrl-f3": "search::SelectNextMatch", // find-and-replace:find-next-selected - "ctrl-shift-f3": "search::SelectPreviousMatch" // find-and-replace:find-previous-selected - } + "ctrl-shift-f3": "search::SelectPreviousMatch", // find-and-replace:find-previous-selected + }, }, { "context": "Workspace", @@ -50,8 +50,8 @@ "ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle "ctrl-k ctrl-b": "workspace::ToggleLeftDock", // tree-view:toggle "ctrl-t": "file_finder::Toggle", // fuzzy-finder:toggle-file-finder - "ctrl-r": "project_symbols::Toggle" // symbols-view:toggle-project-symbols - } + "ctrl-r": "project_symbols::Toggle", // symbols-view:toggle-project-symbols + }, }, { "context": "Pane", @@ -65,8 +65,8 @@ "ctrl-6": ["pane::ActivateItem", 5], // tree-view:open-selected-entry-in-pane-6 "ctrl-7": ["pane::ActivateItem", 6], // tree-view:open-selected-entry-in-pane-7 "ctrl-8": ["pane::ActivateItem", 7], // tree-view:open-selected-entry-in-pane-8 - "ctrl-9": ["pane::ActivateItem", 8] // tree-view:open-selected-entry-in-pane-9 - } + "ctrl-9": ["pane::ActivateItem", 8], // 
tree-view:open-selected-entry-in-pane-9 + }, }, { "context": "ProjectPanel", @@ -75,8 +75,8 @@ "backspace": ["project_panel::Trash", { "skip_prompt": false }], "ctrl-x": "project_panel::Cut", // tree-view:cut "ctrl-c": "project_panel::Copy", // tree-view:copy - "ctrl-v": "project_panel::Paste" // tree-view:paste - } + "ctrl-v": "project_panel::Paste", // tree-view:paste + }, }, { "context": "ProjectPanel && not_editing", @@ -90,7 +90,7 @@ "d": "project_panel::Duplicate", // tree-view:duplicate "home": "menu::SelectFirst", // core:move-to-top "end": "menu::SelectLast", // core:move-to-bottom - "shift-a": "project_panel::NewDirectory" // tree-view:add-folder - } - } + "shift-a": "project_panel::NewDirectory", // tree-view:add-folder + }, + }, ] diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index 4d2d13a90d96c31f72b1bb0ccc74608f81004eda..58a7309cf902a3f69f949830cace2200f41fb0fe 100644 --- a/assets/keymaps/linux/cursor.json +++ b/assets/keymaps/linux/cursor.json @@ -8,8 +8,8 @@ "ctrl-shift-i": "agent::ToggleFocus", "ctrl-l": "agent::ToggleFocus", "ctrl-shift-l": "agent::ToggleFocus", - "ctrl-shift-j": "agent::OpenSettings" - } + "ctrl-shift-j": "agent::OpenSettings", + }, }, { "context": "Editor && mode == full", @@ -20,18 +20,18 @@ "ctrl-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode "ctrl-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "ctrl-k": "assistant::InlineAssist", - "ctrl-shift-k": "assistant::InsertIntoEditor" - } + "ctrl-shift-k": "assistant::InsertIntoEditor", + }, }, { "context": "InlineAssistEditor", "use_key_equivalents": true, "bindings": { - "ctrl-shift-backspace": "editor::Cancel" + "ctrl-shift-backspace": "editor::Cancel", // "alt-enter": // Quick Question // "ctrl-shift-enter": // Full File Context // "ctrl-shift-k": // Toggle input focus (editor <> inline assist) - } + }, }, { "context": "AgentPanel || ContextEditor || (MessageEditor > Editor)", @@ -47,7 +47,7 @@ "ctrl-shift-backspace": "editor::Cancel", "ctrl-r": "agent::NewThread", "ctrl-shift-v": "editor::Paste", - "ctrl-shift-k": "assistant::InsertIntoEditor" + "ctrl-shift-k": "assistant::InsertIntoEditor", // "escape": "agent::ToggleFocus" ///// Enable when Zed supports multiple thread tabs // "ctrl-t": // new thread tab @@ -56,28 +56,29 @@ ///// Enable if Zed adds support for keyboard navigation of thread elements // "tab": // cycle to next message // "shift-tab": // cycle to previous message - } + }, }, { "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { "ctrl-enter": "agent::KeepAll", - "ctrl-backspace": "agent::RejectAll" - } + "ctrl-backspace": "agent::RejectAll", + }, }, { "context": "Editor && mode == full && edit_prediction", "use_key_equivalents": true, "bindings": { - "ctrl-right": "editor::AcceptPartialEditPrediction" - } + "ctrl-right": "editor::AcceptNextWordEditPrediction", + "ctrl-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Terminal", "use_key_equivalents": true, "bindings": { - "ctrl-k": "assistant::InlineAssist" - } - } + "ctrl-k": "assistant::InlineAssist", + }, + }, ] diff --git a/assets/keymaps/linux/emacs.json b/assets/keymaps/linux/emacs.json index c5cf22c81220bf286187252394f8fde26bdd6509..5b6f841de07ac2f9bd45c73e032dea0ede409007 100755 --- a/assets/keymaps/linux/emacs.json +++ b/assets/keymaps/linux/emacs.json @@ -5,8 +5,8 @@ [ { "bindings": { - "ctrl-g": "menu::Cancel" - } + "ctrl-g": "menu::Cancel", + }, }, { // Workaround to avoid falling back to default 
bindings. @@ -18,8 +18,8 @@ "ctrl-g": null, // currently activates `go_to_line::Toggle` when there is nothing to cancel "ctrl-x": null, // currently activates `editor::Cut` if no following key is pressed for 1 second "ctrl-p": null, // currently activates `file_finder::Toggle` when the cursor is on the first character of the buffer - "ctrl-n": null // currently activates `workspace::NewFile` when the cursor is on the last character of the buffer - } + "ctrl-n": null, // currently activates `workspace::NewFile` when the cursor is on the last character of the buffer + }, }, { "context": "Editor", @@ -82,8 +82,8 @@ "ctrl-s": "buffer_search::Deploy", // isearch-forward "ctrl-r": "buffer_search::Deploy", // isearch-backward "alt-^": "editor::JoinLines", // join-line - "alt-q": "editor::Rewrap" // fill-paragraph - } + "alt-q": "editor::Rewrap", // fill-paragraph + }, }, { "context": "Editor && selection_mode", // region selection @@ -119,22 +119,22 @@ "alt->": "editor::SelectToEnd", "ctrl-home": "editor::SelectToBeginning", "ctrl-end": "editor::SelectToEnd", - "ctrl-g": "editor::Cancel" - } + "ctrl-g": "editor::Cancel", + }, }, { "context": "Editor && (showing_code_actions || showing_completions)", "bindings": { "ctrl-p": "editor::ContextMenuPrevious", - "ctrl-n": "editor::ContextMenuNext" - } + "ctrl-n": "editor::ContextMenuNext", + }, }, { "context": "Editor && showing_signature_help && !showing_completions", "bindings": { "ctrl-p": "editor::SignatureHelpPrevious", - "ctrl-n": "editor::SignatureHelpNext" - } + "ctrl-n": "editor::SignatureHelpNext", + }, }, // Example setting for using emacs-style tab // (i.e. indent the current line / selection or perform symbol completion depending on context) @@ -164,8 +164,8 @@ "ctrl-x ctrl-f": "file_finder::Toggle", // find-file "ctrl-x ctrl-s": "workspace::Save", // save-buffer "ctrl-x ctrl-w": "workspace::SaveAs", // write-file - "ctrl-x s": "workspace::SaveAll" // save-some-buffers - } + "ctrl-x s": "workspace::SaveAll", // save-some-buffers + }, }, { // Workaround to enable using native emacs from the Zed terminal. 
@@ -185,22 +185,22 @@ "ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-s": null, // save-buffer "ctrl-x ctrl-w": null, // write-file - "ctrl-x s": null // save-some-buffers - } + "ctrl-x s": null, // save-some-buffers + }, }, { "context": "BufferSearchBar > Editor", "bindings": { "ctrl-s": "search::SelectNextMatch", "ctrl-r": "search::SelectPreviousMatch", - "ctrl-g": "buffer_search::Dismiss" - } + "ctrl-g": "buffer_search::Dismiss", + }, }, { "context": "Pane", "bindings": { "ctrl-alt-left": "pane::GoBack", - "ctrl-alt-right": "pane::GoForward" - } - } + "ctrl-alt-right": "pane::GoForward", + }, + }, ] diff --git a/assets/keymaps/linux/jetbrains.json b/assets/keymaps/linux/jetbrains.json index cf28c43dbd7f8335f30ef7702e584bea5c0ba5e0..d3bf53a0d3694943252e0fccb2ac821cc6c2a6d3 100644 --- a/assets/keymaps/linux/jetbrains.json +++ b/assets/keymaps/linux/jetbrains.json @@ -1,18 +1,20 @@ [ { "bindings": { - "ctrl-alt-s": "zed::OpenSettingsFile", + "ctrl-alt-s": "zed::OpenSettings", "ctrl-{": "pane::ActivatePreviousItem", "ctrl-}": "pane::ActivateNextItem", "shift-escape": null, // Unmap workspace::zoom + "ctrl-~": "git::Branch", "ctrl-f2": "debugger::Stop", "f6": "debugger::Pause", "f7": "debugger::StepInto", "f8": "debugger::StepOver", "shift-f8": "debugger::StepOut", "f9": "debugger::Continue", - "alt-shift-f9": "debugger::Start" - } + "shift-f9": "debugger::Start", + "alt-shift-f9": "debugger::Start", + }, }, { "context": "Editor", @@ -46,7 +48,7 @@ "alt-f7": "editor::FindAllReferences", "ctrl-alt-f7": "editor::FindAllReferences", "ctrl-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock - "ctrl-alt-b": "editor::GoToDefinitionSplit", // Conflicts with workspace::ToggleRightDock + "ctrl-alt-b": "editor::GoToImplementation", // Conflicts with workspace::ToggleRightDock "ctrl-shift-b": "editor::GoToTypeDefinition", "ctrl-alt-shift-b": "editor::GoToTypeDefinitionSplit", "f2": "editor::GoToDiagnostic", @@ -60,24 +62,30 @@ "ctrl-shift-end": "editor::SelectToEnd", "ctrl-f8": "editor::ToggleBreakpoint", "ctrl-shift-f8": "editor::EditLogBreakpoint", - "ctrl-shift-u": "editor::ToggleCase" - } + "ctrl-shift-u": "editor::ToggleCase", + }, }, { "context": "Editor && mode == full", "bindings": { "ctrl-f12": "outline::Toggle", "ctrl-r": ["buffer_search::Deploy", { "replace_enabled": true }], + "ctrl-e": "file_finder::Toggle", "ctrl-shift-n": "file_finder::Toggle", + "ctrl-alt-n": "file_finder::Toggle", "ctrl-g": "go_to_line::Toggle", - "alt-enter": "editor::ToggleCodeActions" - } + "alt-enter": "editor::ToggleCodeActions", + "ctrl-space": "editor::ShowCompletions", + "ctrl-q": "editor::Hover", + "ctrl-p": "editor::ShowSignatureHelp", + "ctrl-\\": "assistant::InlineAssist", + }, }, { "context": "BufferSearchBar", "bindings": { - "shift-enter": "search::SelectPreviousMatch" - } + "shift-enter": "search::SelectPreviousMatch", + }, }, { "context": "BufferSearchBar || ProjectSearchBar", @@ -85,8 +93,8 @@ "alt-c": "search::ToggleCaseSensitive", "alt-e": "search::ToggleSelection", "alt-x": "search::ToggleRegex", - "alt-w": "search::ToggleWholeWord" - } + "alt-w": "search::ToggleWholeWord", + }, }, { "context": "Workspace", @@ -94,9 +102,13 @@ "ctrl-shift-f12": "workspace::ToggleAllDocks", "ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "alt-shift-f10": "task::Spawn", + "shift-f10": "task::Spawn", + "ctrl-f5": "task::Rerun", "ctrl-e": "file_finder::Toggle", - // "ctrl-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor + "ctrl-k": 
"git_panel::ToggleFocus", // bug: This should also focus commit editor "ctrl-shift-n": "file_finder::Toggle", + "ctrl-alt-n": "file_finder::Toggle", + "ctrl-n": "project_symbols::Toggle", "ctrl-shift-a": "command_palette::Toggle", "shift shift": "command_palette::Toggle", "ctrl-alt-shift-n": "project_symbols::Toggle", @@ -104,8 +116,8 @@ "alt-1": "project_panel::ToggleFocus", "alt-5": "debug_panel::ToggleFocus", "alt-6": "diagnostics::Deploy", - "alt-7": "outline_panel::ToggleFocus" - } + "alt-7": "outline_panel::ToggleFocus", + }, }, { "context": "Pane", // this is to override the default Pane mappings to switch tabs @@ -119,22 +131,24 @@ "alt-7": "outline_panel::ToggleFocus", "alt-8": null, // Services (bottom dock) "alt-9": null, // Git History (bottom dock) - "alt-0": "git_panel::ToggleFocus" - } + "alt-0": "git_panel::ToggleFocus", + }, }, { "context": "Workspace || Editor", "bindings": { "alt-f12": "terminal_panel::Toggle", - "ctrl-shift-k": "git::Push" - } + "ctrl-shift-k": "git::Push", + }, }, { "context": "Pane", "bindings": { "ctrl-alt-left": "pane::GoBack", - "ctrl-alt-right": "pane::GoForward" - } + "ctrl-alt-right": "pane::GoForward", + "alt-left": "pane::ActivatePreviousItem", + "alt-right": "pane::ActivateNextItem", + }, }, { "context": "ProjectPanel", @@ -144,21 +158,19 @@ "backspace": ["project_panel::Trash", { "skip_prompt": false }], "delete": ["project_panel::Trash", { "skip_prompt": false }], "shift-delete": ["project_panel::Delete", { "skip_prompt": false }], - "shift-f6": "project_panel::Rename" - } + "shift-f6": "project_panel::Rename", + }, }, { "context": "Terminal", "bindings": { "ctrl-shift-t": "workspace::NewTerminal", "alt-f12": "workspace::CloseActiveDock", - "alt-left": "pane::ActivatePreviousItem", - "alt-right": "pane::ActivateNextItem", "ctrl-up": "terminal::ScrollLineUp", "ctrl-down": "terminal::ScrollLineDown", "shift-pageup": "terminal::ScrollPageUp", - "shift-pagedown": "terminal::ScrollPageDown" - } + "shift-pagedown": "terminal::ScrollPageDown", + }, }, { "context": "GitPanel", "bindings": { "alt-0": "workspace::CloseActiveDock" } }, { "context": "ProjectPanel", "bindings": { "alt-1": "workspace::CloseActiveDock" } }, @@ -169,7 +181,7 @@ "context": "Dock || Workspace || OutlinePanel || ProjectPanel || CollabPanel || (Editor && mode == auto_height)", "bindings": { "escape": "editor::ToggleFocus", - "shift-escape": "workspace::CloseActiveDock" - } - } + "shift-escape": "workspace::CloseActiveDock", + }, + }, ] diff --git a/assets/keymaps/linux/sublime_text.json b/assets/keymaps/linux/sublime_text.json index eefd59e5bd1aa48125d0c6e3d662f3cb4e270be7..1d689a6f5841a011768113257afbed2c447669ed 100644 --- a/assets/keymaps/linux/sublime_text.json +++ b/assets/keymaps/linux/sublime_text.json @@ -22,8 +22,8 @@ "ctrl-^": ["workspace::MoveItemToPane", { "destination": 5 }], "ctrl-&": ["workspace::MoveItemToPane", { "destination": 6 }], "ctrl-*": ["workspace::MoveItemToPane", { "destination": 7 }], - "ctrl-(": ["workspace::MoveItemToPane", { "destination": 8 }] - } + "ctrl-(": ["workspace::MoveItemToPane", { "destination": 8 }], + }, }, { "context": "Editor", @@ -55,20 +55,20 @@ "alt-right": "editor::MoveToNextSubwordEnd", "alt-left": "editor::MoveToPreviousSubwordStart", "alt-shift-right": "editor::SelectToNextSubwordEnd", - "alt-shift-left": "editor::SelectToPreviousSubwordStart" - } + "alt-shift-left": "editor::SelectToPreviousSubwordStart", + }, }, { "context": "Editor && mode == full", "bindings": { - "ctrl-r": "outline::Toggle" - } + "ctrl-r": 
"outline::Toggle", + }, }, { "context": "Editor && !agent_diff", "bindings": { - "ctrl-k ctrl-z": "git::Restore" - } + "ctrl-k ctrl-z": "git::Restore", + }, }, { "context": "Pane", @@ -83,15 +83,15 @@ "alt-6": ["pane::ActivateItem", 5], "alt-7": ["pane::ActivateItem", 6], "alt-8": ["pane::ActivateItem", 7], - "alt-9": "pane::ActivateLastItem" - } + "alt-9": "pane::ActivateLastItem", + }, }, { "context": "Workspace", "bindings": { "ctrl-k ctrl-b": "workspace::ToggleLeftDock", // "ctrl-0": "project_panel::ToggleFocus", // normally resets zoom - "shift-ctrl-r": "project_symbols::Toggle" - } - } + "shift-ctrl-r": "project_symbols::Toggle", + }, + }, ] diff --git a/assets/keymaps/macos/atom.json b/assets/keymaps/macos/atom.json index ca015b667faa05db53d8fdc3bd82352d9bcc62aa..bf049fd3cb3eca8fe8049fa4e0810f82b10a5bbc 100644 --- a/assets/keymaps/macos/atom.json +++ b/assets/keymaps/macos/atom.json @@ -4,16 +4,16 @@ "bindings": { "ctrl-alt-cmd-l": "workspace::Reload", "cmd-k cmd-p": "workspace::ActivatePreviousPane", - "cmd-k cmd-n": "workspace::ActivateNextPane" - } + "cmd-k cmd-n": "workspace::ActivateNextPane", + }, }, { "context": "Editor", "bindings": { "cmd-shift-backspace": "editor::DeleteToBeginningOfLine", "cmd-k cmd-u": "editor::ConvertToUpperCase", - "cmd-k cmd-l": "editor::ConvertToLowerCase" - } + "cmd-k cmd-l": "editor::ConvertToLowerCase", + }, }, { "context": "Editor && mode == full", @@ -33,8 +33,8 @@ "ctrl-cmd-down": "editor::MoveLineDown", "cmd-\\": "workspace::ToggleLeftDock", "ctrl-shift-m": "markdown::OpenPreviewToTheSide", - "cmd-r": "outline::Toggle" - } + "cmd-r": "outline::Toggle", + }, }, { "context": "BufferSearchBar", @@ -42,8 +42,8 @@ "cmd-g": ["editor::SelectNext", { "replace_newest": true }], "cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }], "cmd-f3": "search::SelectNextMatch", - "cmd-shift-f3": "search::SelectPreviousMatch" - } + "cmd-shift-f3": "search::SelectPreviousMatch", + }, }, { "context": "Workspace", @@ -51,8 +51,8 @@ "cmd-\\": "workspace::ToggleLeftDock", "cmd-k cmd-b": "workspace::ToggleLeftDock", "cmd-t": "file_finder::Toggle", - "cmd-shift-r": "project_symbols::Toggle" - } + "cmd-shift-r": "project_symbols::Toggle", + }, }, { "context": "Pane", @@ -67,8 +67,8 @@ "cmd-6": ["pane::ActivateItem", 5], "cmd-7": ["pane::ActivateItem", 6], "cmd-8": ["pane::ActivateItem", 7], - "cmd-9": "pane::ActivateLastItem" - } + "cmd-9": "pane::ActivateLastItem", + }, }, { "context": "ProjectPanel", @@ -77,8 +77,8 @@ "backspace": ["project_panel::Trash", { "skip_prompt": false }], "cmd-x": "project_panel::Cut", "cmd-c": "project_panel::Copy", - "cmd-v": "project_panel::Paste" - } + "cmd-v": "project_panel::Paste", + }, }, { "context": "ProjectPanel && not_editing", @@ -92,7 +92,7 @@ "d": "project_panel::Duplicate", "home": "menu::SelectFirst", "end": "menu::SelectLast", - "shift-a": "project_panel::NewDirectory" - } - } + "shift-a": "project_panel::NewDirectory", + }, + }, ] diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index 97abc7dd819485850107eca6762fc1ed60ec0515..93e259db37ac718d2e0258d83e4de436a0a378fd 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -8,8 +8,8 @@ "cmd-shift-i": "agent::ToggleFocus", "cmd-l": "agent::ToggleFocus", "cmd-shift-l": "agent::ToggleFocus", - "cmd-shift-j": "agent::OpenSettings" - } + "cmd-shift-j": "agent::OpenSettings", + }, }, { "context": "Editor && mode == full", @@ -20,19 +20,19 @@ "cmd-shift-l": "agent::AddSelectionToThread", // In cursor 
uses "Ask" mode "cmd-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "cmd-k": "assistant::InlineAssist", - "cmd-shift-k": "assistant::InsertIntoEditor" - } + "cmd-shift-k": "assistant::InsertIntoEditor", + }, }, { "context": "InlineAssistEditor", "use_key_equivalents": true, "bindings": { "cmd-shift-backspace": "editor::Cancel", - "cmd-enter": "menu::Confirm" + "cmd-enter": "menu::Confirm", // "alt-enter": // Quick Question // "cmd-shift-enter": // Full File Context // "cmd-shift-k": // Toggle input focus (editor <> inline assist) - } + }, }, { "context": "AgentPanel || ContextEditor || (MessageEditor > Editor)", @@ -48,7 +48,7 @@ "cmd-shift-backspace": "editor::Cancel", "cmd-r": "agent::NewThread", "cmd-shift-v": "editor::Paste", - "cmd-shift-k": "assistant::InsertIntoEditor" + "cmd-shift-k": "assistant::InsertIntoEditor", // "escape": "agent::ToggleFocus" ///// Enable when Zed supports multiple thread tabs // "cmd-t": // new thread tab @@ -57,28 +57,29 @@ ///// Enable if Zed adds support for keyboard navigation of thread elements // "tab": // cycle to next message // "shift-tab": // cycle to previous message - } + }, }, { "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { "cmd-enter": "agent::KeepAll", - "cmd-backspace": "agent::RejectAll" - } + "cmd-backspace": "agent::RejectAll", + }, }, { "context": "Editor && mode == full && edit_prediction", "use_key_equivalents": true, "bindings": { - "cmd-right": "editor::AcceptPartialEditPrediction" - } + "cmd-right": "editor::AcceptNextWordEditPrediction", + "cmd-down": "editor::AcceptNextLineEditPrediction", + }, }, { "context": "Terminal", "use_key_equivalents": true, "bindings": { - "cmd-k": "assistant::InlineAssist" - } - } + "cmd-k": "assistant::InlineAssist", + }, + }, ] diff --git a/assets/keymaps/macos/emacs.json b/assets/keymaps/macos/emacs.json index ea831c0c059ea082d002f3af01b8d97be9e86616..2f11e2ce00e8b60a0f1c85b5aeb204e866491a45 100755 --- a/assets/keymaps/macos/emacs.json +++ b/assets/keymaps/macos/emacs.json @@ -6,8 +6,8 @@ { "context": "!GitPanel", "bindings": { - "ctrl-g": "menu::Cancel" - } + "ctrl-g": "menu::Cancel", + }, }, { // Workaround to avoid falling back to default bindings. @@ -15,8 +15,8 @@ // NOTE: must be declared before the `Editor` override. 
"context": "Editor", "bindings": { - "ctrl-g": null // currently activates `go_to_line::Toggle` when there is nothing to cancel - } + "ctrl-g": null, // currently activates `go_to_line::Toggle` when there is nothing to cancel + }, }, { "context": "Editor", @@ -79,8 +79,8 @@ "ctrl-s": "buffer_search::Deploy", // isearch-forward "ctrl-r": "buffer_search::Deploy", // isearch-backward "alt-^": "editor::JoinLines", // join-line - "alt-q": "editor::Rewrap" // fill-paragraph - } + "alt-q": "editor::Rewrap", // fill-paragraph + }, }, { "context": "Editor && selection_mode", // region selection @@ -116,22 +116,22 @@ "alt->": "editor::SelectToEnd", "ctrl-home": "editor::SelectToBeginning", "ctrl-end": "editor::SelectToEnd", - "ctrl-g": "editor::Cancel" - } + "ctrl-g": "editor::Cancel", + }, }, { "context": "Editor && (showing_code_actions || showing_completions)", "bindings": { "ctrl-p": "editor::ContextMenuPrevious", - "ctrl-n": "editor::ContextMenuNext" - } + "ctrl-n": "editor::ContextMenuNext", + }, }, { "context": "Editor && showing_signature_help && !showing_completions", "bindings": { "ctrl-p": "editor::SignatureHelpPrevious", - "ctrl-n": "editor::SignatureHelpNext" - } + "ctrl-n": "editor::SignatureHelpNext", + }, }, // Example setting for using emacs-style tab // (i.e. indent the current line / selection or perform symbol completion depending on context) @@ -161,8 +161,8 @@ "ctrl-x ctrl-f": "file_finder::Toggle", // find-file "ctrl-x ctrl-s": "workspace::Save", // save-buffer "ctrl-x ctrl-w": "workspace::SaveAs", // write-file - "ctrl-x s": "workspace::SaveAll" // save-some-buffers - } + "ctrl-x s": "workspace::SaveAll", // save-some-buffers + }, }, { // Workaround to enable using native emacs from the Zed terminal. @@ -182,22 +182,22 @@ "ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-s": null, // save-buffer "ctrl-x ctrl-w": null, // write-file - "ctrl-x s": null // save-some-buffers - } + "ctrl-x s": null, // save-some-buffers + }, }, { "context": "BufferSearchBar > Editor", "bindings": { "ctrl-s": "search::SelectNextMatch", "ctrl-r": "search::SelectPreviousMatch", - "ctrl-g": "buffer_search::Dismiss" - } + "ctrl-g": "buffer_search::Dismiss", + }, }, { "context": "Pane", "bindings": { "ctrl-alt-left": "pane::GoBack", - "ctrl-alt-right": "pane::GoForward" - } - } + "ctrl-alt-right": "pane::GoForward", + }, + }, ] diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index e5e5aeb0b8516285136438d40b57fb17fc9a9777..9946d8b124957349181db659259174d906d08d3a 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -5,14 +5,16 @@ "cmd-}": "pane::ActivateNextItem", "cmd-0": "git_panel::ToggleFocus", // overrides `cmd-0` zoom reset "shift-escape": null, // Unmap workspace::zoom + "cmd-~": "git::Branch", "ctrl-f2": "debugger::Stop", "f6": "debugger::Pause", "f7": "debugger::StepInto", "f8": "debugger::StepOver", "shift-f8": "debugger::StepOut", "f9": "debugger::Continue", - "alt-shift-f9": "debugger::Start" - } + "shift-f9": "debugger::Start", + "alt-shift-f9": "debugger::Start", + }, }, { "context": "Editor", @@ -45,7 +47,7 @@ "alt-f7": "editor::FindAllReferences", "cmd-alt-f7": "editor::FindAllReferences", "cmd-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock - "cmd-alt-b": "editor::GoToDefinitionSplit", + "cmd-alt-b": "editor::GoToImplementation", "cmd-shift-b": "editor::GoToTypeDefinition", "cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit", "f2": "editor::GoToDiagnostic", @@ -58,24 +60,30 @@ 
"cmd-shift-end": "editor::SelectToEnd", "ctrl-f8": "editor::ToggleBreakpoint", "ctrl-shift-f8": "editor::EditLogBreakpoint", - "cmd-shift-u": "editor::ToggleCase" - } + "cmd-shift-u": "editor::ToggleCase", + }, }, { "context": "Editor && mode == full", "bindings": { "cmd-f12": "outline::Toggle", "cmd-r": ["buffer_search::Deploy", { "replace_enabled": true }], - "cmd-shift-o": "file_finder::Toggle", "cmd-l": "go_to_line::Toggle", - "alt-enter": "editor::ToggleCodeActions" - } + "cmd-e": "file_finder::Toggle", + "cmd-shift-o": "file_finder::Toggle", + "cmd-shift-n": "file_finder::Toggle", + "alt-enter": "editor::ToggleCodeActions", + "ctrl-space": "editor::ShowCompletions", + "cmd-j": "editor::Hover", + "cmd-p": "editor::ShowSignatureHelp", + "cmd-\\": "assistant::InlineAssist", + }, }, { "context": "BufferSearchBar", "bindings": { - "shift-enter": "search::SelectPreviousMatch" - } + "shift-enter": "search::SelectPreviousMatch", + }, }, { "context": "BufferSearchBar || ProjectSearchBar", @@ -87,8 +95,8 @@ "ctrl-alt-c": "search::ToggleCaseSensitive", "ctrl-alt-e": "search::ToggleSelection", "ctrl-alt-w": "search::ToggleWholeWord", - "ctrl-alt-x": "search::ToggleRegex" - } + "ctrl-alt-x": "search::ToggleRegex", + }, }, { "context": "Workspace", @@ -96,9 +104,13 @@ "cmd-shift-f12": "workspace::ToggleAllDocks", "cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-alt-r": "task::Spawn", + "shift-f10": "task::Spawn", + "cmd-f5": "task::Rerun", "cmd-e": "file_finder::Toggle", - // "cmd-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor + "cmd-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor "cmd-shift-o": "file_finder::Toggle", + "cmd-shift-n": "file_finder::Toggle", + "cmd-n": "project_symbols::Toggle", "cmd-shift-a": "command_palette::Toggle", "shift shift": "command_palette::Toggle", "cmd-alt-o": "project_symbols::Toggle", // JetBrains: Go to Symbol @@ -106,8 +118,8 @@ "cmd-1": "project_panel::ToggleFocus", "cmd-5": "debug_panel::ToggleFocus", "cmd-6": "diagnostics::Deploy", - "cmd-7": "outline_panel::ToggleFocus" - } + "cmd-7": "outline_panel::ToggleFocus", + }, }, { "context": "Pane", // this is to override the default Pane mappings to switch tabs @@ -121,22 +133,24 @@ "cmd-7": "outline_panel::ToggleFocus", "cmd-8": null, // Services (bottom dock) "cmd-9": null, // Git History (bottom dock) - "cmd-0": "git_panel::ToggleFocus" - } + "cmd-0": "git_panel::ToggleFocus", + }, }, { "context": "Workspace || Editor", "bindings": { "alt-f12": "terminal_panel::Toggle", - "cmd-shift-k": "git::Push" - } + "cmd-shift-k": "git::Push", + }, }, { "context": "Pane", "bindings": { "cmd-alt-left": "pane::GoBack", - "cmd-alt-right": "pane::GoForward" - } + "cmd-alt-right": "pane::GoForward", + "alt-left": "pane::ActivatePreviousItem", + "alt-right": "pane::ActivateNextItem", + }, }, { "context": "ProjectPanel", @@ -147,8 +161,8 @@ "backspace": ["project_panel::Trash", { "skip_prompt": false }], "delete": ["project_panel::Trash", { "skip_prompt": false }], "shift-delete": ["project_panel::Delete", { "skip_prompt": false }], - "shift-f6": "project_panel::Rename" - } + "shift-f6": "project_panel::Rename", + }, }, { "context": "Terminal", @@ -158,8 +172,8 @@ "cmd-up": "terminal::ScrollLineUp", "cmd-down": "terminal::ScrollLineDown", "shift-pageup": "terminal::ScrollPageUp", - "shift-pagedown": "terminal::ScrollPageDown" - } + "shift-pagedown": "terminal::ScrollPageDown", + }, }, { "context": "GitPanel", "bindings": { "cmd-0": 
"workspace::CloseActiveDock" } }, { "context": "ProjectPanel", "bindings": { "cmd-1": "workspace::CloseActiveDock" } }, @@ -170,7 +184,7 @@ "context": "Dock || Workspace || OutlinePanel || ProjectPanel || CollabPanel || (Editor && mode == auto_height)", "bindings": { "escape": "editor::ToggleFocus", - "shift-escape": "workspace::CloseActiveDock" - } - } + "shift-escape": "workspace::CloseActiveDock", + }, + }, ] diff --git a/assets/keymaps/macos/sublime_text.json b/assets/keymaps/macos/sublime_text.json index d1bffca755b611d9046d4b7e794d2303835227a2..f4ae1ce5dda4e2c0dd21e97bd3a411dd4a4f3663 100644 --- a/assets/keymaps/macos/sublime_text.json +++ b/assets/keymaps/macos/sublime_text.json @@ -22,8 +22,8 @@ "ctrl-^": ["workspace::MoveItemToPane", { "destination": 5 }], "ctrl-&": ["workspace::MoveItemToPane", { "destination": 6 }], "ctrl-*": ["workspace::MoveItemToPane", { "destination": 7 }], - "ctrl-(": ["workspace::MoveItemToPane", { "destination": 8 }] - } + "ctrl-(": ["workspace::MoveItemToPane", { "destination": 8 }], + }, }, { "context": "Editor", @@ -57,20 +57,20 @@ "ctrl-right": "editor::MoveToNextSubwordEnd", "ctrl-left": "editor::MoveToPreviousSubwordStart", "ctrl-shift-right": "editor::SelectToNextSubwordEnd", - "ctrl-shift-left": "editor::SelectToPreviousSubwordStart" - } + "ctrl-shift-left": "editor::SelectToPreviousSubwordStart", + }, }, { "context": "Editor && mode == full", "bindings": { - "cmd-r": "outline::Toggle" - } + "cmd-r": "outline::Toggle", + }, }, { "context": "Editor && !agent_diff", "bindings": { - "cmd-k cmd-z": "git::Restore" - } + "cmd-k cmd-z": "git::Restore", + }, }, { "context": "Pane", @@ -85,8 +85,8 @@ "cmd-6": ["pane::ActivateItem", 5], "cmd-7": ["pane::ActivateItem", 6], "cmd-8": ["pane::ActivateItem", 7], - "cmd-9": "pane::ActivateLastItem" - } + "cmd-9": "pane::ActivateLastItem", + }, }, { "context": "Workspace", @@ -95,7 +95,7 @@ "cmd-t": "file_finder::Toggle", "shift-cmd-r": "project_symbols::Toggle", // Currently busted: https://github.com/zed-industries/feedback/issues/898 - "ctrl-0": "project_panel::ToggleFocus" - } - } + "ctrl-0": "project_panel::ToggleFocus", + }, + }, ] diff --git a/assets/keymaps/macos/textmate.json b/assets/keymaps/macos/textmate.json index f91f39b7f5c079f81b5fcf8e28e2092a33ff1aa4..90450e60af7147f1394eb6cb4c1efc389edad2d0 100644 --- a/assets/keymaps/macos/textmate.json +++ b/assets/keymaps/macos/textmate.json @@ -2,8 +2,8 @@ { "bindings": { "cmd-shift-o": "projects::OpenRecent", - "cmd-alt-tab": "project_panel::ToggleFocus" - } + "cmd-alt-tab": "project_panel::ToggleFocus", + }, }, { "context": "Editor && mode == full", @@ -15,8 +15,8 @@ "cmd-enter": "editor::NewlineBelow", "cmd-alt-enter": "editor::NewlineAbove", "cmd-shift-l": "editor::SelectLine", - "cmd-shift-t": "outline::Toggle" - } + "cmd-shift-t": "outline::Toggle", + }, }, { "context": "Editor", @@ -41,30 +41,30 @@ "ctrl-u": "editor::ConvertToUpperCase", "ctrl-shift-u": "editor::ConvertToLowerCase", "ctrl-alt-u": "editor::ConvertToUpperCamelCase", - "ctrl-_": "editor::ConvertToSnakeCase" - } + "ctrl-_": "editor::ConvertToSnakeCase", + }, }, { "context": "BufferSearchBar", "bindings": { "ctrl-s": "search::SelectNextMatch", - "ctrl-shift-s": "search::SelectPreviousMatch" - } + "ctrl-shift-s": "search::SelectPreviousMatch", + }, }, { "context": "Workspace", "bindings": { "cmd-alt-ctrl-d": "workspace::ToggleLeftDock", "cmd-t": "file_finder::Toggle", - "cmd-shift-t": "project_symbols::Toggle" - } + "cmd-shift-t": "project_symbols::Toggle", + }, }, { "context": "Pane", 
"bindings": { "alt-cmd-r": "search::ToggleRegex", - "ctrl-tab": "project_panel::ToggleFocus" - } + "ctrl-tab": "project_panel::ToggleFocus", + }, }, { "context": "ProjectPanel", @@ -75,11 +75,11 @@ "return": "project_panel::Rename", "cmd-c": "project_panel::Copy", "cmd-v": "project_panel::Paste", - "cmd-alt-c": "project_panel::CopyPath" - } + "cmd-alt-c": "project_panel::CopyPath", + }, }, { "context": "Dock", - "bindings": {} - } + "bindings": {}, + }, ] diff --git a/assets/keymaps/storybook.json b/assets/keymaps/storybook.json index 9b92fbe1a3844043e379647d1dd6c57e082fdf77..432bdc7004a4c66b52e20282aba924611b204aa1 100644 --- a/assets/keymaps/storybook.json +++ b/assets/keymaps/storybook.json @@ -27,7 +27,7 @@ "backspace": "editor::Backspace", "delete": "editor::Delete", "left": "editor::MoveLeft", - "right": "editor::MoveRight" - } - } + "right": "editor::MoveRight", + }, + }, ] diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index c7b83daab67689d10a6b7c1e28312ceff4551e08..6e5d3423872a7dd83234b28e67c5082b36bd858f 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -180,10 +180,9 @@ "ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit", "ctrl-w space": "editor::OpenExcerptsSplit", "ctrl-w g space": "editor::OpenExcerptsSplit", - "ctrl-6": "pane::AlternateFile", "ctrl-^": "pane::AlternateFile", - ".": "vim::Repeat" - } + ".": "vim::Repeat", + }, }, { "context": "vim_mode == normal || vim_mode == visual || vim_mode == operator", @@ -224,8 +223,8 @@ "] r": "vim::GoToNextReference", // tree-sitter related commands "[ x": "vim::SelectLargerSyntaxNode", - "] x": "vim::SelectSmallerSyntaxNode" - } + "] x": "vim::SelectSmallerSyntaxNode", + }, }, { "context": "vim_mode == normal", @@ -262,16 +261,16 @@ "[ d": "editor::GoToPreviousDiagnostic", "] c": "editor::GoToHunk", "[ c": "editor::GoToPreviousHunk", - "g c": "vim::PushToggleComments" - } + "g c": "vim::PushToggleComments", + }, }, { "context": "VimControl && VimCount", "bindings": { "0": ["vim::Number", 0], ":": "vim::CountCommand", - "%": "vim::GoToPercentage" - } + "%": "vim::GoToPercentage", + }, }, { "context": "vim_mode == visual", @@ -323,8 +322,8 @@ "g w": "vim::Rewrap", "g ?": "vim::ConvertToRot13", // "g ?": "vim::ConvertToRot47", - "\"": "vim::PushRegister" - } + "\"": "vim::PushRegister", + }, }, { "context": "vim_mode == helix_select", @@ -344,8 +343,8 @@ "ctrl-pageup": "pane::ActivatePreviousItem", "ctrl-pagedown": "pane::ActivateNextItem", ".": "vim::Repeat", - "alt-.": "vim::RepeatFind" - } + "alt-.": "vim::RepeatFind", + }, }, { "context": "vim_mode == insert", @@ -375,8 +374,8 @@ "ctrl-r": "vim::PushRegister", "insert": "vim::ToggleReplace", "ctrl-o": "vim::TemporaryNormal", - "ctrl-s": "editor::ShowSignatureHelp" - } + "ctrl-s": "editor::ShowSignatureHelp", + }, }, { "context": "showing_completions", @@ -384,8 +383,8 @@ "ctrl-d": "vim::ScrollDown", "ctrl-u": "vim::ScrollUp", "ctrl-e": "vim::LineDown", - "ctrl-y": "vim::LineUp" - } + "ctrl-y": "vim::LineUp", + }, }, { "context": "(vim_mode == normal || vim_mode == helix_normal) && !menu", @@ -410,22 +409,31 @@ "shift-s": "vim::SubstituteLine", "\"": "vim::PushRegister", "ctrl-pagedown": "pane::ActivateNextItem", - "ctrl-pageup": "pane::ActivatePreviousItem" - } + "ctrl-pageup": "pane::ActivatePreviousItem", + }, }, { - "context": "vim_mode == helix_normal && !menu", + "context": "VimControl && vim_mode == helix_normal && !menu", "bindings": { + "j": ["vim::Down", { "display_lines": true }], + "down": ["vim::Down", { "display_lines": true }], + 
"k": ["vim::Up", { "display_lines": true }], + "up": ["vim::Up", { "display_lines": true }], + "g j": "vim::Down", + "g down": "vim::Down", + "g k": "vim::Up", + "g up": "vim::Up", + "escape": "vim::SwitchToHelixNormalMode", "i": "vim::HelixInsert", "a": "vim::HelixAppend", - "ctrl-[": "editor::Cancel" - } + "ctrl-[": "editor::Cancel", + }, }, { "context": "vim_mode == helix_select && !menu", "bindings": { - "escape": "vim::SwitchToHelixNormalMode" - } + "escape": "vim::SwitchToHelixNormalMode", + }, }, { "context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu", @@ -445,9 +453,9 @@ "shift-r": "editor::Paste", "`": "vim::ConvertToLowerCase", "alt-`": "vim::ConvertToUpperCase", - "insert": "vim::InsertBefore", + "insert": "vim::InsertBefore", // not a helix default "shift-u": "editor::Redo", - "ctrl-r": "vim::Redo", + "ctrl-r": "vim::Redo", // not a helix default "y": "vim::HelixYank", "p": "vim::HelixPaste", "shift-p": ["vim::HelixPaste", { "before": true }], @@ -476,31 +484,40 @@ "alt-p": "editor::SelectPreviousSyntaxNode", "alt-n": "editor::SelectNextSyntaxNode", + // Search + "n": "vim::HelixSelectNext", + "shift-n": "vim::HelixSelectPrevious", + // Goto mode "g e": "vim::EndOfDocument", "g h": "vim::StartOfLine", "g l": "vim::EndOfLine", - "g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s" + "g s": "vim::FirstNonWhitespace", "g t": "vim::WindowTop", "g c": "vim::WindowMiddle", "g b": "vim::WindowBottom", - "g r": "editor::FindAllReferences", // zed specific + "g r": "editor::FindAllReferences", "g n": "pane::ActivateNextItem", - "shift-l": "pane::ActivateNextItem", + "shift-l": "pane::ActivateNextItem", // not a helix default "g p": "pane::ActivatePreviousItem", - "shift-h": "pane::ActivatePreviousItem", - "g .": "vim::HelixGotoLastModification", // go to last modification + "shift-h": "pane::ActivatePreviousItem", // not a helix default + "g .": "vim::HelixGotoLastModification", + "g o": "editor::ToggleSelectedDiffHunks", // Zed specific + "g shift-o": "git::ToggleStaged", // Zed specific + "g shift-r": "git::Restore", // Zed specific + "g u": "git::StageAndNext", // Zed specific + "g shift-u": "git::UnstageAndNext", // Zed specific // Window mode + "space w v": "pane::SplitDown", + "space w s": "pane::SplitRight", "space w h": "workspace::ActivatePaneLeft", - "space w l": "workspace::ActivatePaneRight", - "space w k": "workspace::ActivatePaneUp", "space w j": "workspace::ActivatePaneDown", + "space w k": "workspace::ActivatePaneUp", + "space w l": "workspace::ActivatePaneRight", "space w q": "pane::CloseActiveItem", - "space w s": "pane::SplitRight", - "space w r": "pane::SplitRight", - "space w v": "pane::SplitDown", - "space w d": "pane::SplitDown", + "space w r": "pane::SplitRight", // not a helix default + "space w d": "pane::SplitDown", // not a helix default // Space mode "space f": "file_finder::Toggle", @@ -514,6 +531,7 @@ "space c": "editor::ToggleComments", "space p": "editor::Paste", "space y": "editor::Copy", + "space /": "pane::DeploySearch", // Other ":": "command_palette::Toggle", @@ -521,24 +539,22 @@ "]": ["vim::PushHelixNext", { "around": true }], "[": ["vim::PushHelixPrevious", { "around": true }], "g q": "vim::PushRewrap", - "g w": "vim::PushRewrap" - // "tab": "pane::ActivateNextItem", - // "shift-tab": "pane::ActivatePrevItem", - } + "g w": "vim::PushRewrap", // not a helix default & clashes with helix `goto_word` + }, }, { "context": "vim_mode == insert && !(showing_code_actions || showing_completions)", "bindings": { 
"ctrl-p": "editor::ShowWordCompletions", - "ctrl-n": "editor::ShowWordCompletions" - } + "ctrl-n": "editor::ShowWordCompletions", + }, }, { "context": "(vim_mode == insert || vim_mode == normal) && showing_signature_help && !showing_completions", "bindings": { "ctrl-p": "editor::SignatureHelpPrevious", - "ctrl-n": "editor::SignatureHelpNext" - } + "ctrl-n": "editor::SignatureHelpNext", + }, }, { "context": "vim_mode == replace", @@ -554,8 +570,8 @@ "backspace": "vim::UndoReplace", "tab": "vim::Tab", "enter": "vim::Enter", - "insert": "vim::InsertBefore" - } + "insert": "vim::InsertBefore", + }, }, { "context": "vim_mode == waiting", @@ -567,14 +583,14 @@ "escape": "vim::ClearOperators", "ctrl-k": ["vim::PushDigraph", {}], "ctrl-v": ["vim::PushLiteral", {}], - "ctrl-q": ["vim::PushLiteral", {}] - } + "ctrl-q": ["vim::PushLiteral", {}], + }, }, { "context": "Editor && vim_mode == waiting && (vim_operator == ys || vim_operator == cs)", "bindings": { - "escape": "vim::SwitchToNormalMode" - } + "escape": "vim::SwitchToNormalMode", + }, }, { "context": "vim_mode == operator", @@ -582,8 +598,8 @@ "ctrl-c": "vim::ClearOperators", "ctrl-[": "vim::ClearOperators", "escape": "vim::ClearOperators", - "g c": "vim::Comment" - } + "g c": "vim::Comment", + }, }, { "context": "vim_operator == a || vim_operator == i || vim_operator == cs || vim_operator == helix_next || vim_operator == helix_previous", @@ -620,14 +636,14 @@ "shift-i": ["vim::IndentObj", { "include_below": true }], "f": "vim::Method", "c": "vim::Class", - "e": "vim::EntireFile" - } + "e": "vim::EntireFile", + }, }, { "context": "vim_operator == helix_m", "bindings": { - "m": "vim::Matching" - } + "m": "vim::Matching", + }, }, { "context": "vim_operator == helix_next", @@ -644,8 +660,8 @@ "x": "editor::SelectSmallerSyntaxNode", "d": "editor::GoToDiagnostic", "c": "editor::GoToHunk", - "space": "vim::InsertEmptyLineBelow" - } + "space": "vim::InsertEmptyLineBelow", + }, }, { "context": "vim_operator == helix_previous", @@ -662,8 +678,8 @@ "x": "editor::SelectLargerSyntaxNode", "d": "editor::GoToPreviousDiagnostic", "c": "editor::GoToPreviousHunk", - "space": "vim::InsertEmptyLineAbove" - } + "space": "vim::InsertEmptyLineAbove", + }, }, { "context": "vim_operator == c", @@ -671,8 +687,8 @@ "c": "vim::CurrentLine", "x": "vim::Exchange", "d": "editor::Rename", // zed specific - "s": ["vim::PushChangeSurrounds", {}] - } + "s": ["vim::PushChangeSurrounds", {}], + }, }, { "context": "vim_operator == d", @@ -684,36 +700,36 @@ "shift-o": "git::ToggleStaged", "p": "git::Restore", // "d p" "u": "git::StageAndNext", // "d u" - "shift-u": "git::UnstageAndNext" // "d shift-u" - } + "shift-u": "git::UnstageAndNext", // "d shift-u" + }, }, { "context": "vim_operator == gu", "bindings": { "g u": "vim::CurrentLine", - "u": "vim::CurrentLine" - } + "u": "vim::CurrentLine", + }, }, { "context": "vim_operator == gU", "bindings": { "g shift-u": "vim::CurrentLine", - "shift-u": "vim::CurrentLine" - } + "shift-u": "vim::CurrentLine", + }, }, { "context": "vim_operator == g~", "bindings": { "g ~": "vim::CurrentLine", - "~": "vim::CurrentLine" - } + "~": "vim::CurrentLine", + }, }, { "context": "vim_operator == g?", "bindings": { "g ?": "vim::CurrentLine", - "?": "vim::CurrentLine" - } + "?": "vim::CurrentLine", + }, }, { "context": "vim_operator == gq", @@ -721,66 +737,66 @@ "g q": "vim::CurrentLine", "q": "vim::CurrentLine", "g w": "vim::CurrentLine", - "w": "vim::CurrentLine" - } + "w": "vim::CurrentLine", + }, }, { "context": "vim_operator == y", "bindings": { 
"y": "vim::CurrentLine", "v": "vim::PushForcedMotion", - "s": ["vim::PushAddSurrounds", {}] - } + "s": ["vim::PushAddSurrounds", {}], + }, }, { "context": "vim_operator == ys", "bindings": { - "s": "vim::CurrentLine" - } + "s": "vim::CurrentLine", + }, }, { "context": "vim_operator == >", "bindings": { - ">": "vim::CurrentLine" - } + ">": "vim::CurrentLine", + }, }, { "context": "vim_operator == <", "bindings": { - "<": "vim::CurrentLine" - } + "<": "vim::CurrentLine", + }, }, { "context": "vim_operator == eq", "bindings": { - "=": "vim::CurrentLine" - } + "=": "vim::CurrentLine", + }, }, { "context": "vim_operator == sh", "bindings": { - "!": "vim::CurrentLine" - } + "!": "vim::CurrentLine", + }, }, { "context": "vim_operator == gc", "bindings": { - "c": "vim::CurrentLine" - } + "c": "vim::CurrentLine", + }, }, { "context": "vim_operator == gR", "bindings": { "r": "vim::CurrentLine", - "shift-r": "vim::CurrentLine" - } + "shift-r": "vim::CurrentLine", + }, }, { "context": "vim_operator == cx", "bindings": { "x": "vim::CurrentLine", - "c": "vim::ClearExchange" - } + "c": "vim::ClearExchange", + }, }, { "context": "vim_mode == literal", @@ -822,15 +838,15 @@ "tab": ["vim::Literal", ["tab", "\u0009"]], // zed extensions: "backspace": ["vim::Literal", ["backspace", "\u0008"]], - "delete": ["vim::Literal", ["delete", "\u007F"]] - } + "delete": ["vim::Literal", ["delete", "\u007F"]], + }, }, { "context": "BufferSearchBar && !in_replace", "bindings": { "enter": "vim::SearchSubmit", - "escape": "buffer_search::Dismiss" - } + "escape": "buffer_search::Dismiss", + }, }, { "context": "VimControl && !menu || !Editor && !Terminal", @@ -853,6 +869,8 @@ "ctrl-w shift-right": "workspace::SwapPaneRight", "ctrl-w shift-up": "workspace::SwapPaneUp", "ctrl-w shift-down": "workspace::SwapPaneDown", + "ctrl-w x": "workspace::SwapPaneAdjacent", + "ctrl-w ctrl-x": "workspace::SwapPaneAdjacent", "ctrl-w shift-h": "workspace::MovePaneLeft", "ctrl-w shift-l": "workspace::MovePaneRight", "ctrl-w shift-k": "workspace::MovePaneUp", @@ -889,15 +907,19 @@ "ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal", "ctrl-w n": "workspace::NewFileSplitHorizontal", "g t": "vim::GoToTab", - "g shift-t": "vim::GoToPreviousTab" - } + "g shift-t": "vim::GoToPreviousTab", + }, }, { "context": "!Editor && !Terminal", "bindings": { ":": "command_palette::Toggle", - "g /": "pane::DeploySearch" - } + "g /": "pane::DeploySearch", + "] b": "pane::ActivateNextItem", + "[ b": "pane::ActivatePreviousItem", + "] shift-b": "pane::ActivateLastItem", + "[ shift-b": ["pane::ActivateItem", 0], + }, }, { // netrw compatibility @@ -947,17 +969,45 @@ "6": ["vim::Number", 6], "7": ["vim::Number", 7], "8": ["vim::Number", 8], - "9": ["vim::Number", 9] - } + "9": ["vim::Number", 9], + }, }, { "context": "OutlinePanel && not_editing", "bindings": { - "j": "menu::SelectNext", - "k": "menu::SelectPrevious", + "h": "outline_panel::CollapseSelectedEntry", + "j": "vim::MenuSelectNext", + "k": "vim::MenuSelectPrevious", + "down": "vim::MenuSelectNext", + "up": "vim::MenuSelectPrevious", + "l": "outline_panel::ExpandSelectedEntry", "shift-g": "menu::SelectLast", - "g g": "menu::SelectFirst" - } + "g g": "menu::SelectFirst", + "-": "outline_panel::SelectParent", + "enter": "editor::ToggleFocus", + "/": "menu::Cancel", + "ctrl-u": "outline_panel::ScrollUp", + "ctrl-d": "outline_panel::ScrollDown", + "z t": "outline_panel::ScrollCursorTop", + "z z": "outline_panel::ScrollCursorCenter", + "z b": "outline_panel::ScrollCursorBottom", + "0": ["vim::Number", 0], + "1": 
["vim::Number", 1], + "2": ["vim::Number", 2], + "3": ["vim::Number", 3], + "4": ["vim::Number", 4], + "5": ["vim::Number", 5], + "6": ["vim::Number", 6], + "7": ["vim::Number", 7], + "8": ["vim::Number", 8], + "9": ["vim::Number", 9], + }, + }, + { + "context": "OutlinePanel && editing", + "bindings": { + "enter": "menu::Cancel", + }, }, { "context": "GitPanel && ChangesList", @@ -972,8 +1022,8 @@ "x": "git::ToggleStaged", "shift-x": "git::StageAll", "g x": "git::StageRange", - "shift-u": "git::UnstageAll" - } + "shift-u": "git::UnstageAll", + }, }, { "context": "Editor && mode == auto_height && VimControl", @@ -984,8 +1034,8 @@ "#": null, "*": null, "n": null, - "shift-n": null - } + "shift-n": null, + }, }, { "context": "Picker > Editor", @@ -994,29 +1044,29 @@ "ctrl-u": "editor::DeleteToBeginningOfLine", "ctrl-w": "editor::DeleteToPreviousWordStart", "ctrl-p": "menu::SelectPrevious", - "ctrl-n": "menu::SelectNext" - } + "ctrl-n": "menu::SelectNext", + }, }, { "context": "GitCommit > Editor && VimControl && vim_mode == normal", "bindings": { "ctrl-c": "menu::Cancel", - "escape": "menu::Cancel" - } + "escape": "menu::Cancel", + }, }, { "context": "Editor && edit_prediction", "bindings": { // This is identical to the binding in the base keymap, but the vim bindings above to // "vim::Tab" shadow it, so it needs to be bound again. - "tab": "editor::AcceptEditPrediction" - } + "tab": "editor::AcceptEditPrediction", + }, }, { "context": "MessageEditor > Editor && VimControl", "bindings": { - "enter": "agent::Chat" - } + "enter": "agent::Chat", + }, }, { "context": "os != macos && Editor && edit_prediction_conflict", @@ -1024,8 +1074,8 @@ // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This // is because alt-tab may not be available, as it is often used for window switching on Linux // and Windows. - "alt-l": "editor::AcceptEditPrediction" - } + "alt-l": "editor::AcceptEditPrediction", + }, }, { "context": "SettingsWindow > NavigationMenu && !search", @@ -1035,7 +1085,16 @@ "k": "settings_editor::FocusPreviousNavEntry", "j": "settings_editor::FocusNextNavEntry", "g g": "settings_editor::FocusFirstNavEntry", - "shift-g": "settings_editor::FocusLastNavEntry" - } - } + "shift-g": "settings_editor::FocusLastNavEntry", + }, + }, + { + "context": "MarkdownPreview", + "bindings": { + "ctrl-u": "markdown::ScrollPageUp", + "ctrl-d": "markdown::ScrollPageDown", + "ctrl-y": "markdown::ScrollUp", + "ctrl-e": "markdown::ScrollDown", + }, + }, ] diff --git a/assets/prompts/content_prompt_v2.hbs b/assets/prompts/content_prompt_v2.hbs new file mode 100644 index 0000000000000000000000000000000000000000..826aada8c04863c21d756cf99beb64e582ed4906 --- /dev/null +++ b/assets/prompts/content_prompt_v2.hbs @@ -0,0 +1,40 @@ +{{#if language_name}} +Here's a file of {{language_name}} that the user is going to ask you to make an edit to. +{{else}} +Here's a file of text that the user is going to ask you to make an edit to. +{{/if}} + +The section you'll need to rewrite is marked with tags. + + +{{{document_content}}} + + +{{#if is_truncated}} +The context around the relevant section has been truncated (possibly in the middle of a line) for brevity. +{{/if}} + +And here's the section to rewrite based on that prompt again for reference: + + +{{{rewrite_section}}} + + +{{#if diagnostic_errors}} +Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to. 
+ +{{#each diagnostic_errors}} + + {{line_number}} + {{error_message}} + {{code_content}} + +{{/each}} +{{/if}} + +Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved. + +Start at the indentation level in the original file in the rewritten {{content_type}}. + +IMPORTANT: You MUST use one of the provided tools to make the rewrite or to provide an explanation as to why the user's request cannot be fulfilled. You MUST NOT send back unstructured text. If you need to make a statement or ask a question you MUST use one of the tools to do so. +It is an error if you try to make a change that cannot be made simply by editing the rewrite_section. diff --git a/assets/settings/default.json b/assets/settings/default.json index d8c800081246dcf937f7380399d726dd3d349679..e7df5ef0bf2d3bc805c79f79811d9929343544ef 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -12,7 +12,7 @@ "theme": { "mode": "system", "light": "One Light", - "dark": "One Dark" + "dark": "One Dark", }, "icon_theme": "Zed (Default)", // The name of a base set of key bindings to use. @@ -29,7 +29,7 @@ // Features that can be globally enabled or disabled "features": { // Which edit prediction provider to use. - "edit_prediction_provider": "zed" + "edit_prediction_provider": "zed", }, // The name of a font to use for rendering text in the editor // ".ZedMono" currently aliases to Lilex @@ -69,7 +69,7 @@ // The OpenType features to enable for text in the UI "ui_font_features": { // Disable ligatures: - "calt": false + "calt": false, }, // The weight of the UI font in standard CSS units from 100 to 900. "ui_font_weight": 400, @@ -87,7 +87,7 @@ "border_size": 0.0, // Opacity of the inactive panes. 0 means transparent, 1 means opaque. // Values are clamped to the [0.0, 1.0] range. - "inactive_opacity": 1.0 + "inactive_opacity": 1.0, }, // Layout mode of the bottom dock. Defaults to "contained" // choices: contained, full, left_aligned, right_aligned @@ -103,12 +103,12 @@ "left_padding": 0.2, // The relative width of the right padding of the central pane from the // workspace when the centered layout is used. - "right_padding": 0.2 + "right_padding": 0.2, }, // Image viewer settings "image_viewer": { // The unit for image file sizes: "binary" (KiB, MiB) or decimal (KB, MB) - "unit": "binary" + "unit": "binary", }, // Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier. // @@ -175,6 +175,16 @@ // // Default: true "zoomed_padding": true, + // What draws Zed's window decorations (titlebar): + // 1. Client application (Zed) draws its own window decorations + // "client" + // 2. Display server draws the window decorations. Not supported by GNOME Wayland. + // "server" + // + // This requires restarting Zed for changes to take effect. + // + // Default: "client" + "window_decorations": "client", // Whether to use the system provided dialogs for Open and Save As. // When set to false, Zed will use the built-in keyboard-first pickers. "use_system_path_prompts": true, @@ -255,6 +265,12 @@ // Whether to display inline and alongside documentation for items in the // completions menu "show_completion_documentation": true, + // Whether to colorize brackets in the editor. 
+ // (also known as "rainbow brackets") + // + // The colors that are used for different indentation levels are defined in the theme (theme key: `accents`). + // They can be customized by using theme overrides. + "colorize_brackets": false, // When to show the scrollbar in the completion menu. // This setting can take four values: // @@ -280,7 +296,7 @@ // When true, enables drag and drop text selection in buffer. "enabled": true, // The delay in milliseconds that must elapse before drag and drop is allowed. Otherwise, a new text selection is created. - "delay": 300 + "delay": 300, }, // What to do when go to definition yields no results. // @@ -384,14 +400,14 @@ // Visible characters used to render whitespace when show_whitespaces is enabled. "whitespace_map": { "space": "•", - "tab": "→" + "tab": "→", }, // Settings related to calls in Zed "calls": { // Join calls with the microphone live by default "mute_on_join": false, // Share your project when you are the first to join a channel - "share_on_join": false + "share_on_join": false, }, // Toolbar related settings "toolbar": { @@ -404,7 +420,7 @@ // Whether to show agent review buttons in the editor toolbar. "agent_review": true, // Whether to show code action buttons in the editor toolbar. - "code_actions": false + "code_actions": false, }, // Whether to allow windows to tab together based on the user’s tabbing preference (macOS only). "use_system_window_tabs": false, @@ -420,10 +436,12 @@ "show_onboarding_banner": true, // Whether to show user picture in the titlebar. "show_user_picture": true, + // Whether to show the user menu in the titlebar. + "show_user_menu": true, // Whether to show the sign in button in the titlebar. "show_sign_in": true, // Whether to show the menus in the titlebar. - "show_menus": false + "show_menus": false, }, "audio": { // Opt into the new audio system. @@ -456,7 +474,7 @@ // the future we will migrate by setting this to false // // You need to rejoin a call for this setting to apply - "experimental.legacy_audio_compatible": true + "experimental.legacy_audio_compatible": true, }, // Scrollbar related settings "scrollbar": { @@ -495,8 +513,8 @@ // When false, forcefully disables the horizontal scrollbar. Otherwise, obey other settings. "horizontal": true, // When false, forcefully disables the vertical scrollbar. Otherwise, obey other settings. - "vertical": true - } + "vertical": true, + }, }, // Minimap related settings "minimap": { @@ -544,7 +562,7 @@ // 3. "gutter" or "none" to not highlight the current line in the minimap. "current_line_highlight": null, // Maximum number of columns to display in the minimap. - "max_width_columns": 80 + "max_width_columns": 80, }, // Enable middle-click paste on Linux. "middle_click_paste": true, @@ -567,7 +585,7 @@ // Whether to show fold buttons in the gutter. "folds": true, // Minimum number of characters to reserve space for in the gutter. - "min_line_number_digits": 4 + "min_line_number_digits": 4, }, "indent_guides": { // Whether to show indent guides in the editor. @@ -588,7 +606,7 @@ // // 1. "disabled" // 2. "indent_aware" - "background_coloring": "disabled" + "background_coloring": "disabled", }, // Whether the editor will scroll beyond the last line. "scroll_beyond_last_line": "one_page", @@ -607,7 +625,7 @@ "fast_scroll_sensitivity": 4.0, "sticky_scroll": { // Whether to stick scopes to the top of the editor. 
- "enabled": false + "enabled": false, }, "relative_line_numbers": "disabled", // If 'search_wrap' is disabled, search result do not wrap around the end of the file. @@ -625,7 +643,7 @@ // Whether to interpret the search query as a regular expression. "regex": false, // Whether to center the cursor on each search match when navigating. - "center_on_match": false + "center_on_match": false, }, // When to populate a new search's query based on the text under the cursor. // This setting can take the following three values: @@ -668,8 +686,8 @@ "shift": false, "alt": false, "platform": false, - "function": false - } + "function": false, + }, }, // Whether to resize all the panels in a dock when resizing the dock. // Can be a combination of "left", "right" and "bottom". @@ -717,7 +735,7 @@ // "always" // 5. Never show the scrollbar: // "never" - "show": null + "show": null, }, // Which files containing diagnostic errors/warnings to mark in the project panel. // This setting can take the following three values: @@ -740,16 +758,33 @@ // "always" // 2. Never show indent guides: // "never" - "show": "always" + "show": "always", }, + // Sort order for entries in the project panel. + // This setting can take three values: + // + // 1. Show directories first, then files: + // "directories_first" + // 2. Mix directories and files together: + // "mixed" + // 3. Show files first, then directories: + // "files_first" + "sort_mode": "directories_first", // Whether to enable drag-and-drop operations in the project panel. "drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window. "hide_root": false, // Whether to hide the hidden entries in the project panel. "hide_hidden": false, - // Whether to automatically open files when pasting them in the project panel. - "open_file_on_paste": true + // Settings for automatically opening files. + "auto_open": { + // Whether to automatically open newly created files in the editor. + "on_create": true, + // Whether to automatically open files after pasting or duplicating them. + "on_paste": true, + // Whether to automatically open files dropped from external sources. + "on_drop": true, + }, }, "outline_panel": { // Whether to show the outline panel button in the status bar @@ -782,7 +817,7 @@ // "always" // 2. Never show indent guides: // "never" - "show": "always" + "show": "always", }, // Scrollbar-related settings "scrollbar": { @@ -799,11 +834,11 @@ // "always" // 5. Never show the scrollbar: // "never" - "show": null + "show": null, }, // Default depth to expand outline items in the current file. // Set to 0 to collapse all items that have children, 1 or higher to collapse items at that depth or deeper. - "expand_outlines_with_depth": 100 + "expand_outlines_with_depth": 100, }, "collaboration_panel": { // Whether to show the collaboration panel button in the status bar. @@ -811,7 +846,7 @@ // Where to dock the collaboration panel. Can be 'left' or 'right'. "dock": "left", // Default width of the collaboration panel. - "default_width": 240 + "default_width": 240, }, "git_panel": { // Whether to show the git panel button in the status bar. @@ -837,18 +872,22 @@ // // Default: false "collapse_untracked_diff": false, + /// Whether to show entries with tree or flat view in the panel + /// + /// Default: false + "tree_view": false, "scrollbar": { // When to show the scrollbar in the git panel. 
// // Choices: always, auto, never, system // Default: inherits editor scrollbar settings // "show": null - } + }, }, "message_editor": { // Whether to automatically replace emoji shortcodes with emoji characters. // For example: typing `:wave:` gets replaced with `👋`. - "auto_replace_emoji_shortcode": true + "auto_replace_emoji_shortcode": true, }, "notification_panel": { // Whether to show the notification panel button in the status bar. @@ -856,9 +895,11 @@ // Where to dock the notification panel. Can be 'left' or 'right'. "dock": "right", // Default width of the notification panel. - "default_width": 380 + "default_width": 380, }, "agent": { + // Whether the inline assistant should use streaming tools, when available + "inline_assistant_use_streaming_tools": true, // Whether the agent is enabled. "enabled": true, // What completion mode to start new threads in, if available. Can be 'normal' or 'burn'. @@ -867,6 +908,8 @@ "button": true, // Where to dock the agent panel. Can be 'left', 'right' or 'bottom'. "dock": "right", + // Where to dock the agents panel. Can be 'left' or 'right'. + "agents_panel_dock": "left", // Default width when the agent panel is docked to the left or right. "default_width": 640, // Default height when the agent panel is docked to the bottom. @@ -878,7 +921,7 @@ // The provider to use. "provider": "zed.dev", // The model to use. - "model": "claude-sonnet-4" + "model": "claude-sonnet-4", }, // Additional parameters for language model requests. When making a request to a model, parameters will be taken // from the last entry in this list that matches the model's provider and name. In each entry, both provider @@ -929,12 +972,14 @@ "now": true, "find_path": true, "read_file": true, + "restore_file_from_disk": true, + "save_file": true, "open": true, "grep": true, "terminal": true, "thinking": true, - "web_search": true - } + "web_search": true, + }, }, "ask": { "name": "Ask", @@ -951,14 +996,14 @@ "open": true, "grep": true, "thinking": true, - "web_search": true - } + "web_search": true, + }, }, "minimal": { "name": "Minimal", "enable_all_context_servers": false, - "tools": {} - } + "tools": {}, + }, }, // Where to show notifications when the agent has either completed // its response, or else needs confirmation before it can run a @@ -987,7 +1032,7 @@ // Minimum number of lines to display in the agent message editor. // // Default: 4 - "message_editor_min_lines": 4 + "message_editor_min_lines": 4, }, // Whether the screen sharing icon is shown in the os status bar. "show_call_status_icon": true, @@ -1022,7 +1067,7 @@ // Whether or not to show the navigation history buttons. "show_nav_history_buttons": true, // Whether or not to show the tab bar buttons. - "show_tab_bar_buttons": true + "show_tab_bar_buttons": true, }, // Settings related to the editor's tabs "tabs": { @@ -1061,19 +1106,28 @@ // "errors" // 3. Mark files with errors and warnings: // "all" - "show_diagnostics": "off" + "show_diagnostics": "off", }, // Settings related to preview tabs. "preview_tabs": { // Whether preview tabs should be enabled. // Preview tabs allow you to open files in preview mode, where they close automatically - // when you switch to another file unless you explicitly pin them. + // when you open another preview tab. // This is useful for quickly viewing files without cluttering your workspace. "enabled": true, + // Whether to open tabs in preview mode when opened from the project panel with a single click. 
+ "enable_preview_from_project_panel": true, // Whether to open tabs in preview mode when selected from the file finder. "enable_preview_from_file_finder": false, - // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. - "enable_preview_from_code_navigation": false + // Whether to open tabs in preview mode when opened from a multibuffer. + "enable_preview_from_multibuffer": true, + // Whether to open tabs in preview mode when code navigation is used to open a multibuffer. + "enable_preview_multibuffer_from_code_navigation": false, + // Whether to open tabs in preview mode when code navigation is used to open a single file. + "enable_preview_file_from_code_navigation": true, + // Whether to keep tabs in preview mode when code navigation is used to navigate away from them. + // If `enable_preview_file_from_code_navigation` or `enable_preview_multibuffer_from_code_navigation` is also true, the new tab may replace the existing one. + "enable_keep_preview_on_code_navigation": false, }, // Settings related to the file finder. "file_finder": { @@ -1117,7 +1171,7 @@ // * "all": Use all gitignored files // * "indexed": Use only the files Zed had indexed // * "smart": Be smart and search for ignored when called from a gitignored worktree - "include_ignored": "smart" + "include_ignored": "smart", }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. @@ -1176,12 +1230,19 @@ "tab_size": 4, // What debuggers are preferred by default for all languages. "debuggers": [], + // Whether to enable word diff highlighting in the editor. + // + // When enabled, changed words within modified lines are highlighted + // to show exactly what changed. + // + // Default: true + "word_diff_enabled": true, // Control what info is collected by Zed. "telemetry": { // Send debug info like crash reports. "diagnostics": true, // Send anonymized usage data like what languages you're using Zed with. - "metrics": true + "metrics": true, }, // Whether to disable all AI features in Zed. // @@ -1215,7 +1276,7 @@ "enabled": true, // Minimum time to wait before pulling diagnostics from the language server(s). // 0 turns the debounce off. - "debounce_ms": 50 + "debounce_ms": 50, }, // Settings for inline diagnostics "inline": { @@ -1233,8 +1294,8 @@ "min_column": 0, // The minimum severity of the diagnostics to show inline. // Inherits editor's diagnostics' max severity settings when `null`. - "max_severity": null - } + "max_severity": null, + }, }, // Files or globs of files that will be excluded by Zed entirely. They will be skipped during file // scans, file searches, and not be displayed in the project file tree. Takes precedence over `file_scan_inclusions`. @@ -1248,7 +1309,7 @@ "**/.DS_Store", "**/Thumbs.db", "**/.classpath", - "**/.settings" + "**/.settings", ], // Files or globs of files that will be included by Zed, even when ignored by git. This is useful // for files that are not tracked by git, but are still important to your project. Note that globs @@ -1283,14 +1344,14 @@ // Whether or not to display the git commit summary on the same line. "show_commit_summary": false, // The minimum column number to show the inline blame information at - "min_column": 0 + "min_column": 0, }, "blame": { - "show_avatar": true + "show_avatar": true, }, // Control which information is shown in the branch picker. "branch_picker": { - "show_author_name": true + "show_author_name": true, }, // How git hunks are displayed visually in the editor. 
// This setting can take two values: @@ -1299,7 +1360,10 @@ // "hunk_style": "staged_hollow" // 2. Show unstaged hunks hollow and staged hunks filled: // "hunk_style": "unstaged_hollow" - "hunk_style": "staged_hollow" + "hunk_style": "staged_hollow", + // Should the name or path be displayed first in the git view. + // "path_style": "file_name_first" or "file_path_first" + "path_style": "file_name_first", }, // The list of custom Git hosting providers. "git_hosting_providers": [ @@ -1314,6 +1378,8 @@ // "load_direnv": "direct" // 2. Load direnv configuration through the shell hook, works for POSIX shells and fish. // "load_direnv": "shell_hook" + // 3. Don't load direnv configuration at all. + // "load_direnv": "disabled" "load_direnv": "direct", "edit_predictions": { // A list of globs representing files that edit predictions should be disabled for. @@ -1331,7 +1397,7 @@ "**/secrets.yml", "**/.zed/settings.json", // zed project settings "/**/zed/settings.json", // zed user settings - "/**/zed/keymap.json" + "/**/zed/keymap.json", ], // When to show edit predictions previews in buffer. // This setting takes two possible values: @@ -1349,15 +1415,16 @@ "copilot": { "enterprise_uri": null, "proxy": null, - "proxy_no_verify": null + "proxy_no_verify": null, }, "codestral": { - "model": null, - "max_tokens": null + "api_url": "https://codestral.mistral.ai", + "model": "codestral-latest", + "max_tokens": 150, }, // Whether edit predictions are enabled when editing text threads in the agent panel. // This setting has no effect if globally disabled. - "enabled_in_text_threads": true + "enabled_in_text_threads": true, }, // Settings specific to journaling "journal": { @@ -1367,7 +1434,7 @@ // May take 2 values: // 1. hour12 // 2. hour24 - "hour_format": "hour12" + "hour_format": "hour12", }, // Status bar-related settings. "status_bar": { @@ -1378,7 +1445,7 @@ // Whether to show the cursor position button in the status bar. "cursor_position_button": true, // Whether to show active line endings button in the status bar. - "line_endings_button": false + "line_endings_button": false, }, // Settings specific to the terminal "terminal": { @@ -1405,7 +1472,7 @@ "default_height": 320, // What working directory to use when launching the terminal. // May take 4 values: - // 1. Use the current file's project directory. Will Fallback to the + // 1. Use the current file's project directory. Fallback to the // first project directory strategy if unsuccessful // "working_directory": "current_project_directory" // 2. Use the first project in this workspace's directory @@ -1499,8 +1566,8 @@ // Preferred Conda manager to use when activating Conda environments. // Values: "auto", "conda", "mamba", "micromamba" // Default: "auto" - "conda_manager": "auto" - } + "conda_manager": "auto", + }, }, "toolbar": { // Whether to display the terminal title in its toolbar's breadcrumbs. @@ -1508,7 +1575,7 @@ // // The shell running in the terminal needs to be configured to emit the title. // Example: `echo -e "\e]2;New Title\007";` - "breadcrumbs": false + "breadcrumbs": false, }, // Scrollbar-related settings "scrollbar": { @@ -1525,7 +1592,7 @@ // "always" // 5. Never show the scrollbar: // "never" - "show": null + "show": null, }, // Set the terminal's font size. If this option is not included, // the terminal will default to matching the buffer's font size. @@ -1543,6 +1610,8 @@ // Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling. 
// Existing terminals will not pick up this change until they are recreated. "max_scroll_history_lines": 10000, + // The multiplier for scrolling speed in the terminal. + "scroll_multiplier": 1.0, // The minimum APCA perceptual contrast between foreground and background colors. // APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x, // especially for dark mode. Values range from 0 to 106. @@ -1557,7 +1626,55 @@ // // Most terminal themes have APCA values of 40-70. // A value of 45 preserves colorful themes while ensuring legibility. - "minimum_contrast": 45 + "minimum_contrast": 45, + // Regexes used to identify paths for hyperlink navigation. Supports optional named capture + // groups `path`, `line`, `column`, and `link`. If none of these are present, the entire match + // is the hyperlink target. If `path` is present, it is the hyperlink target, along with `line` + // and `column` if present. `link` may be used to customize what text in terminal is part of the + // hyperlink. If `link` is not present, the text of the entire match is used. If `line` and + // `column` are not present, the default built-in line and column suffix processing is used + // which parses `line:column` and `(line,column)` variants. The default value handles Python + // diagnostics and common path, line, column syntaxes. This can be extended or replaced to + // handle specific scenarios. For example, to enable support for hyperlinking paths which + // contain spaces in rust output, + // + // [ + // "\\s+(-->|:::|at) (?(?.+?))(:$|$)", + // "\\s+(Compiling|Checking|Documenting) [^(]+\\((?(?.+))\\)" + // ], + // + // could be used. Processing stops at the first regex with a match, even if no link is + // produced which is the case when the cursor is not over the hyperlinked text. For best + // performance it is recommended to order regexes from most common to least common. For + // readability and documentation, each regex may be an array of strings which are collected + // into one multi-line regex string for use in terminal path hyperlink detection. + "path_hyperlink_regexes": [ + // Python-style diagnostics + "File \"(?[^\"]+)\", line (?[0-9]+)", + // Common path syntax with optional line, column, description, trailing punctuation, or + // surrounding symbols or quotes + [ + "(?x)", + "(?", + " (", + " # multi-char path: first char (not opening delimiter or space)", + " [^({\\[<\"'`\\ ]", + " # middle chars: non-space, and colon/paren only if not followed by digit/paren", + " ([^\\ :(]|[:(][^0-9()])*", + " # last char: not closing delimiter or colon", + " [^()}\\]>\"'`.,;:\\ ]", + " |", + " # single-char path: not delimiter, punctuation, or space", + " [^(){}\\[\\]<>\"'`.,;:\\ ]", + " )", + " # optional line/column suffix (included in path for PathWithPosition::parse_str)", + " (:+[0-9]+(:[0-9]+)?|:?\\([0-9]+([,:]?[0-9]+)?\\))?", + ")", + ], + ], + // Timeout for hover and Cmd-click path hyperlink discovery in milliseconds. Specifying a + // timeout of `0` will disable path hyperlinking in terminal. + "path_hyperlink_timeout_ms": 1, }, "code_actions_on_format": {}, // Settings related to running tasks. @@ -1573,7 +1690,7 @@ // * Zed task from history (e.g. 
one-off task was spawned before) // // Default: true - "prefer_lsp": true + "prefer_lsp": true, }, // An object whose keys are language names, and whose values // are arrays of filenames or extensions of files that should @@ -1588,9 +1705,14 @@ // } // "file_types": { - "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"], + "JSONC": [ + "**/.zed/*.json", + "**/.vscode/**/*.json", + "**/{zed,Zed}/{settings,keymap,tasks,debug}.json", + "tsconfig*.json", + ], "Markdown": [".rules", ".cursorrules", ".windsurfrules", ".clinerules"], - "Shell Script": [".env.*"] + "Shell Script": [".env.*"], }, // Settings for which version of Node.js and NPM to use when installing // language servers and Copilot. @@ -1606,14 +1728,14 @@ // `path`, but not `npm_path`, Zed will assume that `npm` is located at // `${path}/../npm`. "path": null, - "npm_path": null + "npm_path": null, }, // The extensions that Zed should automatically install on startup. // // If you don't want any of these extensions, add this field to your settings // and change the value to `false`. "auto_install_extensions": { - "html": true + "html": true, }, // The capabilities granted to extensions. // @@ -1621,7 +1743,7 @@ "granted_extension_capabilities": [ { "kind": "process:exec", "command": "*", "args": ["**"] }, { "kind": "download_file", "host": "*", "path": ["**"] }, - { "kind": "npm:install", "package": "*" } + { "kind": "npm:install", "package": "*" }, ], // Controls how completions are processed for this language. "completions": { @@ -1672,7 +1794,7 @@ // 4. "replace_suffix" // Behaves like `"replace"` if the text after the cursor is a suffix of the completion, and like // `"insert"` otherwise. - "lsp_insert_mode": "replace_suffix" + "lsp_insert_mode": "replace_suffix", }, // Different settings for specific languages. 
"languages": { @@ -1680,113 +1802,116 @@ "language_servers": ["astro-language-server", "..."], "prettier": { "allowed": true, - "plugins": ["prettier-plugin-astro"] - } + "plugins": ["prettier-plugin-astro"], + }, }, "Blade": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "C": { "format_on_save": "off", "use_on_type_format": false, "prettier": { - "allowed": false - } + "allowed": false, + }, }, "C++": { "format_on_save": "off", "use_on_type_format": false, "prettier": { - "allowed": false - } + "allowed": false, + }, + }, + "CSharp": { + "language_servers": ["roslyn", "!omnisharp", "..."], }, "CSS": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "Dart": { - "tab_size": 2 + "tab_size": 2, }, "Diff": { "show_edit_predictions": false, "remove_trailing_whitespace_on_save": false, - "ensure_final_newline_on_save": false + "ensure_final_newline_on_save": false, }, "Elixir": { - "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."] + "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."], }, "Elm": { - "tab_size": 4 + "tab_size": 4, }, "Erlang": { - "language_servers": ["erlang-ls", "!elp", "..."] + "language_servers": ["erlang-ls", "!elp", "..."], }, "Git Commit": { "allow_rewrap": "anywhere", "soft_wrap": "editor_width", - "preferred_line_length": 72 + "preferred_line_length": 72, }, "Go": { "hard_tabs": true, "code_actions_on_format": { - "source.organizeImports": true + "source.organizeImports": true, }, - "debuggers": ["Delve"] + "debuggers": ["Delve"], }, "GraphQL": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "HEEX": { - "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."] + "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."], }, "HTML": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "HTML+ERB": { - "language_servers": ["herb", "!ruby-lsp", "..."] + "language_servers": ["herb", "!ruby-lsp", "..."], }, "Java": { "prettier": { "allowed": true, - "plugins": ["prettier-plugin-java"] - } + "plugins": ["prettier-plugin-java"], + }, }, "JavaScript": { "language_servers": ["!typescript-language-server", "vtsls", "..."], "prettier": { - "allowed": true - } + "allowed": true, + }, }, "JSON": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "JSONC": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "JS+ERB": { - "language_servers": ["!ruby-lsp", "..."] + "language_servers": ["!ruby-lsp", "..."], }, "Kotlin": { - "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."] + "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."], }, "LaTeX": { "formatter": "language_server", "language_servers": ["texlab", "..."], "prettier": { "allowed": true, - "plugins": ["prettier-plugin-latex"] - } + "plugins": ["prettier-plugin-latex"], + }, }, "Markdown": { "format_on_save": "off", @@ -1794,136 +1919,145 @@ "remove_trailing_whitespace_on_save": false, "allow_rewrap": "anywhere", "soft_wrap": "editor_width", + "completions": { + "words": "disabled", + }, "prettier": { - "allowed": true - } + "allowed": true, + }, }, "PHP": { - "language_servers": ["phpactor", "!intelephense", "..."], + "language_servers": ["phpactor", "!intelephense", "!phptools", "..."], "prettier": { "allowed": true, "plugins": ["@prettier/plugin-php"], - "parser": "php" - } + "parser": "php", + }, }, "Plain Text": { "allow_rewrap": "anywhere", - "soft_wrap": "editor_width" + "soft_wrap": "editor_width", + "completions": { + 
"words": "disabled", + }, + }, + "Proto": { + "language_servers": ["buf", "!protols", "!protobuf-language-server", "..."], }, "Python": { "code_actions_on_format": { - "source.organizeImports.ruff": true + "source.organizeImports.ruff": true, }, "formatter": { "language_server": { - "name": "ruff" - } + "name": "ruff", + }, }, "debuggers": ["Debugpy"], - "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."] + "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."], }, "Ruby": { - "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."] + "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."], }, "Rust": { - "debuggers": ["CodeLLDB"] + "debuggers": ["CodeLLDB"], }, "SCSS": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "Starlark": { - "language_servers": ["starpls", "!buck2-lsp", "..."] + "language_servers": ["starpls", "!buck2-lsp", "..."], }, "Svelte": { "language_servers": ["svelte-language-server", "..."], "prettier": { "allowed": true, - "plugins": ["prettier-plugin-svelte"] - } + "plugins": ["prettier-plugin-svelte"], + }, }, "TSX": { "language_servers": ["!typescript-language-server", "vtsls", "..."], "prettier": { - "allowed": true - } + "allowed": true, + }, }, "Twig": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "TypeScript": { "language_servers": ["!typescript-language-server", "vtsls", "..."], "prettier": { - "allowed": true - } + "allowed": true, + }, }, "SystemVerilog": { "format_on_save": "off", "language_servers": ["!slang", "..."], - "use_on_type_format": false + "use_on_type_format": false, }, "Vue.js": { "language_servers": ["vue-language-server", "vtsls", "..."], "prettier": { - "allowed": true - } + "allowed": true, + }, }, "XML": { "prettier": { "allowed": true, - "plugins": ["@prettier/plugin-xml"] - } + "plugins": ["@prettier/plugin-xml"], + }, }, "YAML": { "prettier": { - "allowed": true - } + "allowed": true, + }, }, "YAML+ERB": { - "language_servers": ["!ruby-lsp", "..."] + "language_servers": ["!ruby-lsp", "..."], }, "Zig": { - "language_servers": ["zls", "..."] - } + "language_servers": ["zls", "..."], + }, }, // Different settings for specific language models. "language_models": { "anthropic": { - "api_url": "https://api.anthropic.com" + "api_url": "https://api.anthropic.com", }, "bedrock": {}, "google": { - "api_url": "https://generativelanguage.googleapis.com" + "api_url": "https://generativelanguage.googleapis.com", }, "ollama": { - "api_url": "http://localhost:11434" + "api_url": "http://localhost:11434", }, "openai": { - "api_url": "https://api.openai.com/v1" + "api_url": "https://api.openai.com/v1", }, "openai_compatible": {}, "open_router": { - "api_url": "https://openrouter.ai/api/v1" + "api_url": "https://openrouter.ai/api/v1", }, "lmstudio": { - "api_url": "http://localhost:1234/api/v0" + "api_url": "http://localhost:1234/api/v0", }, "deepseek": { - "api_url": "https://api.deepseek.com/v1" + "api_url": "https://api.deepseek.com/v1", }, "mistral": { - "api_url": "https://api.mistral.ai/v1" + "api_url": "https://api.mistral.ai/v1", }, "vercel": { - "api_url": "https://api.v0.dev/v1" + "api_url": "https://api.v0.dev/v1", }, "x_ai": { - "api_url": "https://api.x.ai/v1" + "api_url": "https://api.x.ai/v1", }, - "zed.dev": {} + "zed.dev": {}, }, "session": { // Whether or not to restore unsaved buffers on restart. @@ -1932,7 +2066,13 @@ // dirty files when closing the application. 
// // Default: true - "restore_unsaved_buffers": true + "restore_unsaved_buffers": true, + // Whether or not to skip worktree trust checks. + // When trusted, project settings are synchronized automatically, + // language and MCP servers are downloaded and started automatically. + // + // Default: false + "trust_all_worktrees": false, }, // Zed's Prettier integration settings. // Allows to enable/disable formatting with Prettier @@ -1950,11 +2090,11 @@ // "singleQuote": true // Forces Prettier integration to use a specific parser name when formatting files with the language // when set to a non-empty string. - "parser": "" + "parser": "", }, // Settings for auto-closing of JSX tags. "jsx_tag_auto_close": { - "enabled": true + "enabled": true, }, // LSP Specific settings. "lsp": { @@ -1975,19 +2115,19 @@ // Specify the DAP name as a key here. "CodeLLDB": { "env": { - "RUST_LOG": "info" - } - } + "RUST_LOG": "info", + }, + }, }, // Common language server settings. "global_lsp_settings": { // Whether to show the LSP servers button in the status bar. - "button": true + "button": true, }, // Jupyter settings "jupyter": { "enabled": true, - "kernel_selections": {} + "kernel_selections": {}, // Specify the language name as the key and the kernel name as the value. // "kernel_selections": { // "python": "conda-base" @@ -2001,7 +2141,7 @@ "max_columns": 128, // Maximum number of lines to keep in REPL's scrollback buffer. // Clamped with [4, 256] range. - "max_lines": 32 + "max_lines": 32, }, // Vim settings "vim": { @@ -2015,7 +2155,14 @@ // Specify the mode as the key and the shape as the value. // The mode can be one of the following: "normal", "replace", "insert", "visual". // The shape can be one of the following: "block", "bar", "underline", "hollow". - "cursor_shape": {} + "cursor_shape": {}, + }, + // Which-key popup settings + "which_key": { + // Whether to show the which-key popup when holding down key combinations. + "enabled": false, + // Delay in milliseconds before showing the which-key popup. + "delay_ms": 1000, }, // The server to connect to. If the environment variable // ZED_SERVER_URL is set, it will override this setting. @@ -2040,6 +2187,18 @@ "dev": { // "theme": "Andromeda" }, + // Settings overrides to use when using Linux. + "linux": {}, + // Settings overrides to use when using macOS. + "macos": {}, + // Settings overrides to use when using Windows. + "windows": { + "languages": { + "PHP": { + "language_servers": ["intelephense", "!phpactor", "!phptools", "..."], + }, + }, + }, // Whether to show full labels in line indicator or short ones // // Values: @@ -2097,7 +2256,7 @@ "dock": "bottom", "log_dap_communications": true, "format_dap_log_messages": true, - "button": true + "button": true, }, // Configures any number of settings profiles that are temporarily applied on // top of your existing user settings when selected from @@ -2124,5 +2283,5 @@ // Useful for filtering out noisy logs or enabling more verbose logging. 
// // Example: {"log": {"client": "warn"}} - "log": {} + "log": {}, } diff --git a/assets/settings/initial_debug_tasks.json b/assets/settings/initial_debug_tasks.json index af4512bd51aa82d57ce62e605b45ee61e8f98030..851289392a65aecfca17e00d4c123823ac9e21cb 100644 --- a/assets/settings/initial_debug_tasks.json +++ b/assets/settings/initial_debug_tasks.json @@ -8,7 +8,7 @@ "adapter": "Debugpy", "program": "$ZED_FILE", "request": "launch", - "cwd": "$ZED_WORKTREE_ROOT" + "cwd": "$ZED_WORKTREE_ROOT", }, { "label": "Debug active JavaScript file", @@ -16,7 +16,7 @@ "program": "$ZED_FILE", "request": "launch", "cwd": "$ZED_WORKTREE_ROOT", - "type": "pwa-node" + "type": "pwa-node", }, { "label": "JavaScript debug terminal", @@ -24,6 +24,6 @@ "request": "launch", "cwd": "$ZED_WORKTREE_ROOT", "console": "integratedTerminal", - "type": "pwa-node" - } + "type": "pwa-node", + }, ] diff --git a/assets/settings/initial_server_settings.json b/assets/settings/initial_server_settings.json index d6ec33e60128380378610a273a1bbdff1ecdbaa8..29aa569b105157df7ec48164e2066fdac72c7b41 100644 --- a/assets/settings/initial_server_settings.json +++ b/assets/settings/initial_server_settings.json @@ -3,5 +3,5 @@ // For a full list of overridable settings, and general information on settings, // see the documentation: https://zed.dev/docs/configuring-zed#settings-files { - "lsp": {} + "lsp": {}, } diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json index a79e98063237ca297a89b0d151bd48149061b7bb..5bedafbd3a1e75a755598e37cd673742e146fdcc 100644 --- a/assets/settings/initial_tasks.json +++ b/assets/settings/initial_tasks.json @@ -47,8 +47,8 @@ // Whether to show the task line in the output of the spawned task, defaults to `true`. "show_summary": true, // Whether to show the command line in the output of the spawned task, defaults to `true`. - "show_command": true + "show_command": true, // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. 
// "tags": [] - } + }, ] diff --git a/assets/settings/initial_user_settings.json b/assets/settings/initial_user_settings.json index 5ac2063bdb481e057a2d124c1e72f998390b066b..8b573854895a03243803a71a91a35af647f45ca2 100644 --- a/assets/settings/initial_user_settings.json +++ b/assets/settings/initial_user_settings.json @@ -12,6 +12,6 @@ "theme": { "mode": "system", "light": "One Light", - "dark": "One Dark" - } + "dark": "One Dark", + }, } diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index 7c84c603bda7fd7590067ec9f566f3582ba6aefd..e2b7c3c91fca46ab0e4064719bea5c8793faaccc 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -45,6 +45,7 @@ "tab.inactive_background": "#1f2127ff", "tab.active_background": "#0d1016ff", "search.match_background": "#5ac2fe66", + "search.active_match_background": "#ea570166", "panel.background": "#1f2127ff", "panel.focused_border": "#5ac1feff", "pane.focused_border": null, @@ -436,6 +437,7 @@ "tab.inactive_background": "#ececedff", "tab.active_background": "#fcfcfcff", "search.match_background": "#3b9ee566", + "search.active_match_background": "#f88b3666", "panel.background": "#ececedff", "panel.focused_border": "#3b9ee5ff", "pane.focused_border": null, @@ -827,6 +829,7 @@ "tab.inactive_background": "#353944ff", "tab.active_background": "#242835ff", "search.match_background": "#73cffe66", + "search.active_match_background": "#fd722b66", "panel.background": "#353944ff", "panel.focused_border": null, "pane.focused_border": null, diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index a0f0a3ad637a4d212c8bf38f95f2e8424919d6bf..16ae188712f7a800ab4fb8a81a2d24cac99da56b 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -46,6 +46,7 @@ "tab.inactive_background": "#3a3735ff", "tab.active_background": "#282828ff", "search.match_background": "#83a59866", + "search.active_match_background": "#c09f3f66", "panel.background": "#3a3735ff", "panel.focused_border": "#83a598ff", "pane.focused_border": null, @@ -70,33 +71,33 @@ "editor.document_highlight.read_background": "#83a5981a", "editor.document_highlight.write_background": "#92847466", "terminal.background": "#282828ff", - "terminal.foreground": "#fbf1c7ff", + "terminal.foreground": "#ebdbb2ff", "terminal.bright_foreground": "#fbf1c7ff", - "terminal.dim_foreground": "#282828ff", + "terminal.dim_foreground": "#766b5dff", "terminal.ansi.black": "#282828ff", - "terminal.ansi.bright_black": "#73675eff", + "terminal.ansi.bright_black": "#928374ff", "terminal.ansi.dim_black": "#fbf1c7ff", - "terminal.ansi.red": "#fb4a35ff", - "terminal.ansi.bright_red": "#93201dff", - "terminal.ansi.dim_red": "#ffaa95ff", - "terminal.ansi.green": "#b7bb26ff", - "terminal.ansi.bright_green": "#605c1bff", - "terminal.ansi.dim_green": "#e0dc98ff", - "terminal.ansi.yellow": "#f9bd2fff", - "terminal.ansi.bright_yellow": "#91611bff", - "terminal.ansi.dim_yellow": "#fedc9bff", - "terminal.ansi.blue": "#83a598ff", - "terminal.ansi.bright_blue": "#414f4aff", - "terminal.ansi.dim_blue": "#c0d2cbff", - "terminal.ansi.magenta": "#d3869bff", - "terminal.ansi.bright_magenta": "#8e5868ff", - "terminal.ansi.dim_magenta": "#ff9ebbff", - "terminal.ansi.cyan": "#8ec07cff", - "terminal.ansi.bright_cyan": "#45603eff", - "terminal.ansi.dim_cyan": "#c7dfbdff", - "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": 
"#fb4934ff", + "terminal.ansi.dim_red": "#8e1814ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#b8bb26ff", + "terminal.ansi.dim_green": "#6a6912ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#fabd2fff", + "terminal.ansi.dim_yellow": "#966a17ff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#83a598ff", + "terminal.ansi.dim_blue": "#305d5fff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#d3869bff", + "terminal.ansi.dim_magenta": "#7c455eff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#8ec07cff", + "terminal.ansi.dim_cyan": "#496e4aff", + "terminal.ansi.white": "#a89984ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#766b5dff", "link_text.hover": "#83a598ff", "version_control.added": "#b7bb26ff", "version_control.modified": "#f9bd2fff", @@ -452,6 +453,7 @@ "tab.inactive_background": "#393634ff", "tab.active_background": "#1d2021ff", "search.match_background": "#83a59866", + "search.active_match_background": "#c9653666", "panel.background": "#393634ff", "panel.focused_border": "#83a598ff", "pane.focused_border": null, @@ -476,33 +478,33 @@ "editor.document_highlight.read_background": "#83a5981a", "editor.document_highlight.write_background": "#92847466", "terminal.background": "#1d2021ff", - "terminal.foreground": "#fbf1c7ff", + "terminal.foreground": "#ebdbb2ff", "terminal.bright_foreground": "#fbf1c7ff", - "terminal.dim_foreground": "#1d2021ff", - "terminal.ansi.black": "#1d2021ff", - "terminal.ansi.bright_black": "#73675eff", + "terminal.dim_foreground": "#766b5dff", + "terminal.ansi.black": "#282828ff", + "terminal.ansi.bright_black": "#928374ff", "terminal.ansi.dim_black": "#fbf1c7ff", - "terminal.ansi.red": "#fb4a35ff", - "terminal.ansi.bright_red": "#93201dff", - "terminal.ansi.dim_red": "#ffaa95ff", - "terminal.ansi.green": "#b7bb26ff", - "terminal.ansi.bright_green": "#605c1bff", - "terminal.ansi.dim_green": "#e0dc98ff", - "terminal.ansi.yellow": "#f9bd2fff", - "terminal.ansi.bright_yellow": "#91611bff", - "terminal.ansi.dim_yellow": "#fedc9bff", - "terminal.ansi.blue": "#83a598ff", - "terminal.ansi.bright_blue": "#414f4aff", - "terminal.ansi.dim_blue": "#c0d2cbff", - "terminal.ansi.magenta": "#d3869bff", - "terminal.ansi.bright_magenta": "#8e5868ff", - "terminal.ansi.dim_magenta": "#ff9ebbff", - "terminal.ansi.cyan": "#8ec07cff", - "terminal.ansi.bright_cyan": "#45603eff", - "terminal.ansi.dim_cyan": "#c7dfbdff", - "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": "#fb4934ff", + "terminal.ansi.dim_red": "#8e1814ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#b8bb26ff", + "terminal.ansi.dim_green": "#6a6912ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#fabd2fff", + "terminal.ansi.dim_yellow": "#966a17ff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#83a598ff", + "terminal.ansi.dim_blue": "#305d5fff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#d3869bff", + "terminal.ansi.dim_magenta": "#7c455eff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#8ec07cff", + "terminal.ansi.dim_cyan": "#496e4aff", + "terminal.ansi.white": "#a89984ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#766b5dff", "link_text.hover": 
"#83a598ff", "version_control.added": "#b7bb26ff", "version_control.modified": "#f9bd2fff", @@ -858,6 +860,7 @@ "tab.inactive_background": "#3b3735ff", "tab.active_background": "#32302fff", "search.match_background": "#83a59866", + "search.active_match_background": "#aea85166", "panel.background": "#3b3735ff", "panel.focused_border": null, "pane.focused_border": null, @@ -882,33 +885,33 @@ "editor.document_highlight.read_background": "#83a5981a", "editor.document_highlight.write_background": "#92847466", "terminal.background": "#32302fff", - "terminal.foreground": "#fbf1c7ff", + "terminal.foreground": "#ebdbb2ff", "terminal.bright_foreground": "#fbf1c7ff", - "terminal.dim_foreground": "#32302fff", - "terminal.ansi.black": "#32302fff", - "terminal.ansi.bright_black": "#73675eff", + "terminal.dim_foreground": "#766b5dff", + "terminal.ansi.black": "#282828ff", + "terminal.ansi.bright_black": "#928374ff", "terminal.ansi.dim_black": "#fbf1c7ff", - "terminal.ansi.red": "#fb4a35ff", - "terminal.ansi.bright_red": "#93201dff", - "terminal.ansi.dim_red": "#ffaa95ff", - "terminal.ansi.green": "#b7bb26ff", - "terminal.ansi.bright_green": "#605c1bff", - "terminal.ansi.dim_green": "#e0dc98ff", - "terminal.ansi.yellow": "#f9bd2fff", - "terminal.ansi.bright_yellow": "#91611bff", - "terminal.ansi.dim_yellow": "#fedc9bff", - "terminal.ansi.blue": "#83a598ff", - "terminal.ansi.bright_blue": "#414f4aff", - "terminal.ansi.dim_blue": "#c0d2cbff", - "terminal.ansi.magenta": "#d3869bff", - "terminal.ansi.bright_magenta": "#8e5868ff", - "terminal.ansi.dim_magenta": "#ff9ebbff", - "terminal.ansi.cyan": "#8ec07cff", - "terminal.ansi.bright_cyan": "#45603eff", - "terminal.ansi.dim_cyan": "#c7dfbdff", - "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": "#fb4934ff", + "terminal.ansi.dim_red": "#8e1814ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#b8bb26ff", + "terminal.ansi.dim_green": "#6a6912ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#fabd2fff", + "terminal.ansi.dim_yellow": "#966a17ff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#83a598ff", + "terminal.ansi.dim_blue": "#305d5fff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#d3869bff", + "terminal.ansi.dim_magenta": "#7c455eff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#8ec07cff", + "terminal.ansi.dim_cyan": "#496e4aff", + "terminal.ansi.white": "#a89984ff", + "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.dim_white": "#766b5dff", "link_text.hover": "#83a598ff", "version_control.added": "#b7bb26ff", "version_control.modified": "#f9bd2fff", @@ -1264,6 +1267,7 @@ "tab.inactive_background": "#ecddb4ff", "tab.active_background": "#fbf1c7ff", "search.match_background": "#0b667866", + "search.active_match_background": "#ba2d1166", "panel.background": "#ecddb4ff", "panel.focused_border": null, "pane.focused_border": null, @@ -1291,30 +1295,30 @@ "terminal.foreground": "#282828ff", "terminal.bright_foreground": "#282828ff", "terminal.dim_foreground": "#fbf1c7ff", - "terminal.ansi.black": "#282828ff", - "terminal.ansi.bright_black": "#0b6678ff", - "terminal.ansi.dim_black": "#5f5650ff", - "terminal.ansi.red": "#9d0308ff", - "terminal.ansi.bright_red": "#db8b7aff", - "terminal.ansi.dim_red": "#4e1207ff", - "terminal.ansi.green": "#797410ff", - "terminal.ansi.bright_green": 
"#bfb787ff", - "terminal.ansi.dim_green": "#3e3a11ff", - "terminal.ansi.yellow": "#b57615ff", - "terminal.ansi.bright_yellow": "#e2b88bff", - "terminal.ansi.dim_yellow": "#5c3a12ff", - "terminal.ansi.blue": "#0b6678ff", - "terminal.ansi.bright_blue": "#8fb0baff", - "terminal.ansi.dim_blue": "#14333bff", - "terminal.ansi.magenta": "#8f3e71ff", - "terminal.ansi.bright_magenta": "#c76da0ff", - "terminal.ansi.dim_magenta": "#5c2848ff", - "terminal.ansi.cyan": "#437b59ff", - "terminal.ansi.bright_cyan": "#9fbca8ff", - "terminal.ansi.dim_cyan": "#253e2eff", - "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.black": "#fbf1c7ff", + "terminal.ansi.bright_black": "#928374ff", + "terminal.ansi.dim_black": "#7c6f64ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": "#9d0006ff", + "terminal.ansi.dim_red": "#c31c16ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#79740eff", + "terminal.ansi.dim_green": "#929015ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#b57614ff", + "terminal.ansi.dim_yellow": "#cf8e1aff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#076678ff", + "terminal.ansi.dim_blue": "#356f77ff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#8f3f71ff", + "terminal.ansi.dim_magenta": "#a85580ff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#427b58ff", + "terminal.ansi.dim_cyan": "#5f9166ff", + "terminal.ansi.white": "#7c6f64ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#282828ff", "link_text.hover": "#0b6678ff", "version_control.added": "#797410ff", "version_control.modified": "#b57615ff", @@ -1670,6 +1674,7 @@ "tab.inactive_background": "#ecddb5ff", "tab.active_background": "#f9f5d7ff", "search.match_background": "#0b667866", + "search.active_match_background": "#dc351466", "panel.background": "#ecddb5ff", "panel.focused_border": null, "pane.focused_border": null, @@ -1697,30 +1702,30 @@ "terminal.foreground": "#282828ff", "terminal.bright_foreground": "#282828ff", "terminal.dim_foreground": "#f9f5d7ff", - "terminal.ansi.black": "#282828ff", - "terminal.ansi.bright_black": "#73675eff", - "terminal.ansi.dim_black": "#f9f5d7ff", - "terminal.ansi.red": "#9d0308ff", - "terminal.ansi.bright_red": "#db8b7aff", - "terminal.ansi.dim_red": "#4e1207ff", - "terminal.ansi.green": "#797410ff", - "terminal.ansi.bright_green": "#bfb787ff", - "terminal.ansi.dim_green": "#3e3a11ff", - "terminal.ansi.yellow": "#b57615ff", - "terminal.ansi.bright_yellow": "#e2b88bff", - "terminal.ansi.dim_yellow": "#5c3a12ff", - "terminal.ansi.blue": "#0b6678ff", - "terminal.ansi.bright_blue": "#8fb0baff", - "terminal.ansi.dim_blue": "#14333bff", - "terminal.ansi.magenta": "#8f3e71ff", - "terminal.ansi.bright_magenta": "#c76da0ff", - "terminal.ansi.dim_magenta": "#5c2848ff", - "terminal.ansi.cyan": "#437b59ff", - "terminal.ansi.bright_cyan": "#9fbca8ff", - "terminal.ansi.dim_cyan": "#253e2eff", - "terminal.ansi.white": "#f9f5d7ff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.black": "#fbf1c7ff", + "terminal.ansi.bright_black": "#928374ff", + "terminal.ansi.dim_black": "#7c6f64ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": "#9d0006ff", + "terminal.ansi.dim_red": "#c31c16ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#79740eff", + 
"terminal.ansi.dim_green": "#929015ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#b57614ff", + "terminal.ansi.dim_yellow": "#cf8e1aff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#076678ff", + "terminal.ansi.dim_blue": "#356f77ff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#8f3f71ff", + "terminal.ansi.dim_magenta": "#a85580ff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#427b58ff", + "terminal.ansi.dim_cyan": "#5f9166ff", + "terminal.ansi.white": "#7c6f64ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#282828ff", "link_text.hover": "#0b6678ff", "version_control.added": "#797410ff", "version_control.modified": "#b57615ff", @@ -2076,6 +2081,7 @@ "tab.inactive_background": "#ecdcb3ff", "tab.active_background": "#f2e5bcff", "search.match_background": "#0b667866", + "search.active_match_background": "#d7331466", "panel.background": "#ecdcb3ff", "panel.focused_border": null, "pane.focused_border": null, @@ -2103,30 +2109,30 @@ "terminal.foreground": "#282828ff", "terminal.bright_foreground": "#282828ff", "terminal.dim_foreground": "#f2e5bcff", - "terminal.ansi.black": "#282828ff", - "terminal.ansi.bright_black": "#73675eff", - "terminal.ansi.dim_black": "#f2e5bcff", - "terminal.ansi.red": "#9d0308ff", - "terminal.ansi.bright_red": "#db8b7aff", - "terminal.ansi.dim_red": "#4e1207ff", - "terminal.ansi.green": "#797410ff", - "terminal.ansi.bright_green": "#bfb787ff", - "terminal.ansi.dim_green": "#3e3a11ff", - "terminal.ansi.yellow": "#b57615ff", - "terminal.ansi.bright_yellow": "#e2b88bff", - "terminal.ansi.dim_yellow": "#5c3a12ff", - "terminal.ansi.blue": "#0b6678ff", - "terminal.ansi.bright_blue": "#8fb0baff", - "terminal.ansi.dim_blue": "#14333bff", - "terminal.ansi.magenta": "#8f3e71ff", - "terminal.ansi.bright_magenta": "#c76da0ff", - "terminal.ansi.dim_magenta": "#5c2848ff", - "terminal.ansi.cyan": "#437b59ff", - "terminal.ansi.bright_cyan": "#9fbca8ff", - "terminal.ansi.dim_cyan": "#253e2eff", - "terminal.ansi.white": "#f2e5bcff", - "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#b0a189ff", + "terminal.ansi.black": "#fbf1c7ff", + "terminal.ansi.bright_black": "#928374ff", + "terminal.ansi.dim_black": "#7c6f64ff", + "terminal.ansi.red": "#cc241dff", + "terminal.ansi.bright_red": "#9d0006ff", + "terminal.ansi.dim_red": "#c31c16ff", + "terminal.ansi.green": "#98971aff", + "terminal.ansi.bright_green": "#79740eff", + "terminal.ansi.dim_green": "#929015ff", + "terminal.ansi.yellow": "#d79921ff", + "terminal.ansi.bright_yellow": "#b57614ff", + "terminal.ansi.dim_yellow": "#cf8e1aff", + "terminal.ansi.blue": "#458588ff", + "terminal.ansi.bright_blue": "#076678ff", + "terminal.ansi.dim_blue": "#356f77ff", + "terminal.ansi.magenta": "#b16286ff", + "terminal.ansi.bright_magenta": "#8f3f71ff", + "terminal.ansi.dim_magenta": "#a85580ff", + "terminal.ansi.cyan": "#689d6aff", + "terminal.ansi.bright_cyan": "#427b58ff", + "terminal.ansi.dim_cyan": "#5f9166ff", + "terminal.ansi.white": "#7c6f64ff", + "terminal.ansi.bright_white": "#282828ff", + "terminal.ansi.dim_white": "#282828ff", "link_text.hover": "#0b6678ff", "version_control.added": "#797410ff", "version_control.modified": "#b57615ff", diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 6849cd05dc70752216789ae04e81fad232f7b14b..13f94991ad44fc997144a3d44527dcbce5231504 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -45,6 +45,7 
@@ "tab.inactive_background": "#2f343eff", "tab.active_background": "#282c33ff", "search.match_background": "#74ade866", + "search.active_match_background": "#e8af7466", "panel.background": "#2f343eff", "panel.focused_border": null, "pane.focused_border": null, @@ -67,37 +68,39 @@ "editor.active_wrap_guide": "#c8ccd41a", "editor.document_highlight.read_background": "#74ade81a", "editor.document_highlight.write_background": "#555a6366", - "terminal.background": "#282c33ff", - "terminal.foreground": "#dce0e5ff", + "terminal.background": "#282c34ff", + "terminal.foreground": "#abb2bfff", "terminal.bright_foreground": "#dce0e5ff", - "terminal.dim_foreground": "#282c33ff", - "terminal.ansi.black": "#282c33ff", - "terminal.ansi.bright_black": "#525561ff", - "terminal.ansi.dim_black": "#dce0e5ff", - "terminal.ansi.red": "#d07277ff", - "terminal.ansi.bright_red": "#673a3cff", - "terminal.ansi.dim_red": "#eab7b9ff", - "terminal.ansi.green": "#a1c181ff", - "terminal.ansi.bright_green": "#4d6140ff", - "terminal.ansi.dim_green": "#d1e0bfff", - "terminal.ansi.yellow": "#dec184ff", - "terminal.ansi.bright_yellow": "#e5c07bff", - "terminal.ansi.dim_yellow": "#f1dfc1ff", - "terminal.ansi.blue": "#74ade8ff", - "terminal.ansi.bright_blue": "#385378ff", - "terminal.ansi.dim_blue": "#bed5f4ff", - "terminal.ansi.magenta": "#b477cfff", - "terminal.ansi.bright_magenta": "#d6b4e4ff", - "terminal.ansi.dim_magenta": "#612a79ff", - "terminal.ansi.cyan": "#6eb4bfff", - "terminal.ansi.bright_cyan": "#3a565bff", - "terminal.ansi.dim_cyan": "#b9d9dfff", - "terminal.ansi.white": "#dce0e5ff", + "terminal.dim_foreground": "#636d83ff", + "terminal.ansi.black": "#282c34ff", + "terminal.ansi.bright_black": "#636d83ff", + "terminal.ansi.dim_black": "#3b3f4aff", + "terminal.ansi.red": "#e06c75ff", + "terminal.ansi.bright_red": "#EA858Bff", + "terminal.ansi.dim_red": "#a7545aff", + "terminal.ansi.green": "#98c379ff", + "terminal.ansi.bright_green": "#AAD581ff", + "terminal.ansi.dim_green": "#6d8f59ff", + "terminal.ansi.yellow": "#e5c07bff", + "terminal.ansi.bright_yellow": "#FFD885ff", + "terminal.ansi.dim_yellow": "#b8985bff", + "terminal.ansi.blue": "#61afefff", + "terminal.ansi.bright_blue": "#85C1FFff", + "terminal.ansi.dim_blue": "#457cadff", + "terminal.ansi.magenta": "#c678ddff", + "terminal.ansi.bright_magenta": "#D398EBff", + "terminal.ansi.dim_magenta": "#8d54a0ff", + "terminal.ansi.cyan": "#56b6c2ff", + "terminal.ansi.bright_cyan": "#6ED5DEff", + "terminal.ansi.dim_cyan": "#3c818aff", + "terminal.ansi.white": "#abb2bfff", "terminal.ansi.bright_white": "#fafafaff", - "terminal.ansi.dim_white": "#575d65ff", + "terminal.ansi.dim_white": "#8f969bff", "link_text.hover": "#74ade8ff", "version_control.added": "#27a657ff", "version_control.modified": "#d3b020ff", + "version_control.word_added": "#2EA04859", + "version_control.word_deleted": "#78081BCC", "version_control.deleted": "#e06c76ff", "version_control.conflict_marker.ours": "#a1c1811a", "version_control.conflict_marker.theirs": "#74ade81a", @@ -446,6 +449,7 @@ "tab.inactive_background": "#ebebecff", "tab.active_background": "#fafafaff", "search.match_background": "#5c79e266", + "search.active_match_background": "#d0a92366", "panel.background": "#ebebecff", "panel.focused_border": null, "pane.focused_border": null, @@ -469,36 +473,38 @@ "editor.document_highlight.read_background": "#5c78e225", "editor.document_highlight.write_background": "#a3a3a466", "terminal.background": "#fafafaff", - "terminal.foreground": "#242529ff", - "terminal.bright_foreground": "#242529ff", 
- "terminal.dim_foreground": "#fafafaff", - "terminal.ansi.black": "#242529ff", - "terminal.ansi.bright_black": "#747579ff", - "terminal.ansi.dim_black": "#97979aff", - "terminal.ansi.red": "#d36151ff", - "terminal.ansi.bright_red": "#f0b0a4ff", - "terminal.ansi.dim_red": "#6f312aff", - "terminal.ansi.green": "#669f59ff", - "terminal.ansi.bright_green": "#b2cfa9ff", - "terminal.ansi.dim_green": "#354d2eff", - "terminal.ansi.yellow": "#dec184ff", - "terminal.ansi.bright_yellow": "#826221ff", - "terminal.ansi.dim_yellow": "#786441ff", - "terminal.ansi.blue": "#5c78e2ff", - "terminal.ansi.bright_blue": "#b5baf2ff", - "terminal.ansi.dim_blue": "#2d3d75ff", - "terminal.ansi.magenta": "#984ea5ff", - "terminal.ansi.bright_magenta": "#cea6d3ff", - "terminal.ansi.dim_magenta": "#4b2a50ff", - "terminal.ansi.cyan": "#3a82b7ff", - "terminal.ansi.bright_cyan": "#a3bedaff", - "terminal.ansi.dim_cyan": "#254058ff", - "terminal.ansi.white": "#fafafaff", + "terminal.foreground": "#2a2c33ff", + "terminal.bright_foreground": "#2a2c33ff", + "terminal.dim_foreground": "#bbbbbbff", + "terminal.ansi.black": "#000000ff", + "terminal.ansi.bright_black": "#000000ff", + "terminal.ansi.dim_black": "#555555ff", + "terminal.ansi.red": "#de3e35ff", + "terminal.ansi.bright_red": "#de3e35ff", + "terminal.ansi.dim_red": "#9c2b26ff", + "terminal.ansi.green": "#3f953aff", + "terminal.ansi.bright_green": "#3f953aff", + "terminal.ansi.dim_green": "#2b6927ff", + "terminal.ansi.yellow": "#d2b67cff", + "terminal.ansi.bright_yellow": "#d2b67cff", + "terminal.ansi.dim_yellow": "#a48c5aff", + "terminal.ansi.blue": "#2f5af3ff", + "terminal.ansi.bright_blue": "#2f5af3ff", + "terminal.ansi.dim_blue": "#2140abff", + "terminal.ansi.magenta": "#950095ff", + "terminal.ansi.bright_magenta": "#a00095ff", + "terminal.ansi.dim_magenta": "#6a006aff", + "terminal.ansi.cyan": "#3f953aff", + "terminal.ansi.bright_cyan": "#3f953aff", + "terminal.ansi.dim_cyan": "#2b6927ff", + "terminal.ansi.white": "#bbbbbbff", "terminal.ansi.bright_white": "#ffffffff", - "terminal.ansi.dim_white": "#aaaaaaff", + "terminal.ansi.dim_white": "#888888ff", "link_text.hover": "#5c78e2ff", "version_control.added": "#27a657ff", "version_control.modified": "#d3b020ff", + "version_control.word_added": "#2EA04859", + "version_control.word_deleted": "#F85149CC", "version_control.deleted": "#e06c76ff", "conflict": "#a48819ff", "conflict.background": "#faf2e6ff", diff --git a/clippy.toml b/clippy.toml index 0ce7a6cd68d4e8210788eb7a67aa06c742cc8274..9dd246074a06c4db7b66eff7a83ef68e3612c378 100644 --- a/clippy.toml +++ b/clippy.toml @@ -14,6 +14,7 @@ disallowed-methods = [ { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" }, { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." }, { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." }, + { path = "cocoa::foundation::NSString::alloc", reason = "NSString must be autoreleased to avoid memory leaks. Use `ns_string()` helper instead." 
}, ] disallowed-types = [ # { path = "std::collections::HashMap", replacement = "collections::HashMap" }, diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index 8ef6f1a52c8b207658d59a1e6b877964df9e42ce..70f2e4d259f1611fb42ebc0b064d278c8b3b9c4d 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -46,6 +46,7 @@ url.workspace = true util.workspace = true uuid.workspace = true watch.workspace = true +urlencoding.workspace = true [dev-dependencies] env_logger.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index a63dabf1fb25258b6f4255a5c67682165371b255..2ec6347fd4aa088d7ae2cc8f5a7b6cef37d3b202 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -43,6 +43,7 @@ pub struct UserMessage { pub content: ContentBlock, pub chunks: Vec, pub checkpoint: Option, + pub indented: bool, } #[derive(Debug)] @@ -73,6 +74,7 @@ impl UserMessage { #[derive(Debug, PartialEq)] pub struct AssistantMessage { pub chunks: Vec, + pub indented: bool, } impl AssistantMessage { @@ -123,6 +125,14 @@ pub enum AgentThreadEntry { } impl AgentThreadEntry { + pub fn is_indented(&self) -> bool { + match self { + Self::UserMessage(message) => message.indented, + Self::AssistantMessage(message) => message.indented, + Self::ToolCall(_) => false, + } + } + pub fn to_markdown(&self, cx: &App) -> String { match self { Self::UserMessage(message) => message.to_markdown(cx), @@ -201,17 +211,19 @@ impl ToolCall { }; let mut content = Vec::with_capacity(tool_call.content.len()); for item in tool_call.content { - content.push(ToolCallContent::from_acp( + if let Some(item) = ToolCallContent::from_acp( item, language_registry.clone(), path_style, terminals, cx, - )?); + )? { + content.push(item); + } } let result = Self { - id: tool_call.id, + id: tool_call.tool_call_id, label: cx .new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)), kind: tool_call.kind, @@ -241,6 +253,7 @@ impl ToolCall { locations, raw_input, raw_output, + .. } = fields; if let Some(kind) = kind { @@ -262,21 +275,29 @@ impl ToolCall { } if let Some(content) = content { - let new_content_len = content.len(); + let mut new_content_len = content.len(); let mut content = content.into_iter(); // Reuse existing content if we can for (old, new) in self.content.iter_mut().zip(content.by_ref()) { - old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?; + let valid_content = + old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?; + if !valid_content { + new_content_len -= 1; + } } for new in content { - self.content.push(ToolCallContent::from_acp( + if let Some(new) = ToolCallContent::from_acp( new, language_registry.clone(), path_style, terminals, cx, - )?) + )? 
{ + self.content.push(new); + } else { + new_content_len -= 1; + } } self.content.truncate(new_content_len); } @@ -347,13 +368,13 @@ impl ToolCall { let buffer = buffer.await.log_err()?; let position = buffer .update(cx, |buffer, _| { + let snapshot = buffer.snapshot(); if let Some(row) = location.line { - let snapshot = buffer.snapshot(); let column = snapshot.indent_size_for_line(row).len; let point = snapshot.clip_point(Point::new(row, column), Bias::Left); snapshot.anchor_before(point) } else { - Anchor::MIN + Anchor::min_for_buffer(snapshot.remote_id()) } }) .ok()?; @@ -425,6 +446,7 @@ impl From for ToolCallStatus { acp::ToolCallStatus::InProgress => Self::InProgress, acp::ToolCallStatus::Completed => Self::Completed, acp::ToolCallStatus::Failed => Self::Failed, + _ => Self::Pending, } } } @@ -537,7 +559,7 @@ impl ContentBlock { .. }) => Self::resource_link_md(&uri, path_style), acp::ContentBlock::Image(image) => Self::image_md(&image), - acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(), + _ => String::new(), } } @@ -591,15 +613,17 @@ impl ToolCallContent { path_style: PathStyle, terminals: &HashMap>, cx: &mut App, - ) -> Result { + ) -> Result> { match content { - acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new( - content, - &language_registry, - path_style, - cx, - ))), - acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| { + acp::ToolCallContent::Content(acp::Content { content, .. }) => { + Ok(Some(Self::ContentBlock(ContentBlock::new( + content, + &language_registry, + path_style, + cx, + )))) + } + acp::ToolCallContent::Diff(diff) => Ok(Some(Self::Diff(cx.new(|cx| { Diff::finalized( diff.path.to_string_lossy().into_owned(), diff.old_text, @@ -607,12 +631,13 @@ impl ToolCallContent { language_registry, cx, ) - }))), - acp::ToolCallContent::Terminal { terminal_id } => terminals + })))), + acp::ToolCallContent::Terminal(acp::Terminal { terminal_id, .. }) => terminals .get(&terminal_id) .cloned() - .map(Self::Terminal) + .map(|terminal| Some(Self::Terminal(terminal))) .ok_or_else(|| anyhow::anyhow!("Terminal with id `{}` not found", terminal_id)), + _ => Ok(None), } } @@ -623,9 +648,9 @@ impl ToolCallContent { path_style: PathStyle, terminals: &HashMap>, cx: &mut App, - ) -> Result<()> { + ) -> Result { let needs_update = match (&self, &new) { - (Self::Diff(old_diff), acp::ToolCallContent::Diff { diff: new_diff }) => { + (Self::Diff(old_diff), acp::ToolCallContent::Diff(new_diff)) => { old_diff.read(cx).needs_update( new_diff.old_text.as_deref().unwrap_or(""), &new_diff.new_text, @@ -635,10 +660,14 @@ impl ToolCallContent { _ => true, }; - if needs_update { - *self = Self::from_acp(new, language_registry, path_style, terminals, cx)?; + if let Some(update) = Self::from_acp(new, language_registry, path_style, terminals, cx)? { + if needs_update { + *self = update; + } + Ok(true) + } else { + Ok(false) } - Ok(()) } pub fn to_markdown(&self, cx: &App) -> String { @@ -660,7 +689,7 @@ pub enum ToolCallUpdate { impl ToolCallUpdate { fn id(&self) -> &acp::ToolCallId { match self { - Self::UpdateFields(update) => &update.id, + Self::UpdateFields(update) => &update.tool_call_id, Self::UpdateDiff(diff) => &diff.id, Self::UpdateTerminal(terminal) => &terminal.id, } @@ -732,6 +761,7 @@ impl Plan { acp::PlanEntryStatus::Completed => { stats.completed += 1; } + _ => {} } } @@ -1154,6 +1184,7 @@ impl AcpThread { current_mode_id, .. 
}) => cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id)), + _ => {} } Ok(()) } @@ -1163,6 +1194,16 @@ impl AcpThread { message_id: Option, chunk: acp::ContentBlock, cx: &mut Context, + ) { + self.push_user_content_block_with_indent(message_id, chunk, false, cx) + } + + pub fn push_user_content_block_with_indent( + &mut self, + message_id: Option, + chunk: acp::ContentBlock, + indented: bool, + cx: &mut Context, ) { let language_registry = self.project.read(cx).languages().clone(); let path_style = self.project.read(cx).path_style(cx); @@ -1173,8 +1214,10 @@ impl AcpThread { id, content, chunks, + indented: existing_indented, .. }) = last_entry + && *existing_indented == indented { *id = message_id.or(id.take()); content.append(chunk.clone(), &language_registry, path_style, cx); @@ -1189,6 +1232,7 @@ impl AcpThread { content, chunks: vec![chunk], checkpoint: None, + indented, }), cx, ); @@ -1200,12 +1244,26 @@ impl AcpThread { chunk: acp::ContentBlock, is_thought: bool, cx: &mut Context, + ) { + self.push_assistant_content_block_with_indent(chunk, is_thought, false, cx) + } + + pub fn push_assistant_content_block_with_indent( + &mut self, + chunk: acp::ContentBlock, + is_thought: bool, + indented: bool, + cx: &mut Context, ) { let language_registry = self.project.read(cx).languages().clone(); let path_style = self.project.read(cx).path_style(cx); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() - && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry + && let AgentThreadEntry::AssistantMessage(AssistantMessage { + chunks, + indented: existing_indented, + }) = last_entry + && *existing_indented == indented { let idx = entries_len - 1; cx.emit(AcpThreadEvent::EntryUpdated(idx)); @@ -1234,6 +1292,7 @@ impl AcpThread { self.push_entry( AgentThreadEntry::AssistantMessage(AssistantMessage { chunks: vec![chunk], + indented, }), cx, ); @@ -1287,11 +1346,7 @@ impl AcpThread { label: cx.new(|cx| Markdown::new("Tool call not found".into(), None, None, cx)), kind: acp::ToolKind::Fetch, content: vec![ToolCallContent::ContentBlock(ContentBlock::new( - acp::ContentBlock::Text(acp::TextContent { - text: "Tool call not found".to_string(), - annotations: None, - meta: None, - }), + "Tool call not found".into(), &languages, path_style, cx, @@ -1315,7 +1370,7 @@ impl AcpThread { let location_updated = update.fields.locations.is_some(); call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?; if location_updated { - self.resolve_locations(update.id, cx); + self.resolve_locations(update.tool_call_id, cx); } } ToolCallUpdate::UpdateDiff(update) => { @@ -1353,9 +1408,9 @@ impl AcpThread { ) -> Result<(), acp::Error> { let language_registry = self.project.read(cx).languages().clone(); let path_style = self.project.read(cx).path_style(cx); - let id = update.id.clone(); + let id = update.tool_call_id.clone(); - let agent = self.connection().telemetry_id(); + let agent_telemetry_id = self.connection().telemetry_id(); let session = self.session_id(); if let ToolCallStatus::Completed | ToolCallStatus::Failed = status { let status = if matches!(status, ToolCallStatus::Completed) { @@ -1363,7 +1418,12 @@ impl AcpThread { } else { "failed" }; - telemetry::event!("Agent Tool Call Completed", agent, session, status); + telemetry::event!( + "Agent Tool Call Completed", + agent_telemetry_id, + session, + status + ); } if let Some(ix) = self.index_for_tool_call(&id) { @@ -1518,16 +1578,16 @@ impl AcpThread { // some tools would 
(incorrectly) continue to auto-accept. if let Some(allow_once_option) = options.iter().find_map(|option| { if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) { - Some(option.id.clone()) + Some(option.option_id.clone()) } else { None } }) { self.upsert_tool_call_inner(tool_call, ToolCallStatus::Pending, cx)?; return Ok(async { - acp::RequestPermissionOutcome::Selected { - option_id: allow_once_option, - } + acp::RequestPermissionOutcome::Selected(acp::SelectedPermissionOutcome::new( + allow_once_option, + )) } .boxed()); } @@ -1543,7 +1603,9 @@ impl AcpThread { let fut = async { match rx.await { - Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, + Ok(option) => acp::RequestPermissionOutcome::Selected( + acp::SelectedPermissionOutcome::new(option), + ), Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled, } } @@ -1570,6 +1632,7 @@ impl AcpThread { acp::PermissionOptionKind::AllowOnce | acp::PermissionOptionKind::AllowAlways => { ToolCallStatus::InProgress } + _ => ToolCallStatus::InProgress, }; let curr_status = mem::replace(&mut call.status, new_status); @@ -1648,14 +1711,7 @@ impl AcpThread { message: &str, cx: &mut Context, ) -> BoxFuture<'static, Result<()>> { - self.send( - vec![acp::ContentBlock::Text(acp::TextContent { - text: message.to_string(), - annotations: None, - meta: None, - })], - cx, - ) + self.send(vec![message.into()], cx) } pub fn send( @@ -1669,11 +1725,7 @@ impl AcpThread { self.project.read(cx).path_style(cx), cx, ); - let request = acp::PromptRequest { - prompt: message.clone(), - session_id: self.session_id.clone(), - meta: None, - }; + let request = acp::PromptRequest::new(self.session_id.clone(), message.clone()); let git_store = self.project.read(cx).git_store().clone(); let message_id = if self.connection.truncate(&self.session_id, cx).is_some() { @@ -1690,6 +1742,7 @@ impl AcpThread { content: block, chunks: message, checkpoint: None, + indented: false, }), cx, ); @@ -1765,7 +1818,7 @@ impl AcpThread { result, Ok(Ok(acp::PromptResponse { stop_reason: acp::StopReason::Cancelled, - meta: None, + .. })) ); @@ -1781,7 +1834,7 @@ impl AcpThread { // Handle refusal - distinguish between user prompt and tool call refusals if let Ok(Ok(acp::PromptResponse { stop_reason: acp::StopReason::Refusal, - meta: _, + .. })) = result { if let Some((user_msg_ix, _)) = this.last_user_message() { @@ -1866,10 +1919,14 @@ impl AcpThread { .checkpoint .as_ref() .map(|c| c.git_checkpoint.clone()); + + // Cancel any in-progress generation before restoring + let cancel_task = self.cancel(cx); let rewind = self.rewind(id.clone(), cx); let git_store = self.project.read(cx).git_store().clone(); cx.spawn(async move |_, cx| { + cancel_task.await; rewind.await?; if let Some(checkpoint) = checkpoint { git_store @@ -1894,9 +1951,25 @@ impl AcpThread { cx.update(|cx| truncate.run(id.clone(), cx))?.await?; this.update(cx, |this, cx| { if let Some((ix, _)) = this.user_message_mut(&id) { + // Collect all terminals from entries that will be removed + let terminals_to_remove: Vec = this.entries[ix..] 
+ .iter() + .flat_map(|entry| entry.terminals()) + .filter_map(|terminal| terminal.read(cx).id().clone().into()) + .collect(); + let range = ix..this.entries.len(); this.entries.truncate(ix); cx.emit(AcpThreadEvent::EntriesRemoved(range)); + + // Kill and remove the terminals + for terminal_id in terminals_to_remove { + if let Some(terminal) = this.terminals.remove(&terminal_id) { + terminal.update(cx, |terminal, cx| { + terminal.kill(cx); + }); + } + } } this.action_log().update(cx, |action_log, cx| { action_log.reject_all_edits(Some(telemetry), cx) @@ -1997,7 +2070,7 @@ impl AcpThread { })?; Ok(project.open_buffer(path, cx)) }) - .map_err(|e| acp::Error::internal_error().with_data(e.to_string())) + .map_err(|e| acp::Error::internal_error().data(e.to_string())) .flatten()?; let buffer = load.await?; @@ -2030,7 +2103,7 @@ impl AcpThread { let start_position = Point::new(line, 0); if start_position > max_point { - return Err(acp::Error::invalid_params().with_data(format!( + return Err(acp::Error::invalid_params().data(format!( "Attempting to read beyond the end of the file, line {}:{}", max_point.row + 1, max_point.column @@ -2100,7 +2173,7 @@ impl AcpThread { position: edits .last() .map(|(range, _)| range.end) - .unwrap_or(Anchor::MIN), + .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), }), cx, ); @@ -2182,7 +2255,7 @@ impl AcpThread { let language_registry = project.read(cx).languages().clone(); let is_windows = project.read(cx).path_style(cx).is_windows(); - let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into()); + let terminal_id = acp::TerminalId::new(Uuid::new_v4().to_string()); let terminal_task = cx.spawn({ let terminal_id = terminal_id.clone(); async move |_this, cx| { @@ -2392,7 +2465,7 @@ mod tests { .await .unwrap(); - let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into()); + let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); // Send Output BEFORE Created - should be buffered by acp_thread thread.update(cx, |thread, cx| { @@ -2454,7 +2527,7 @@ mod tests { .await .unwrap(); - let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into()); + let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); // Send Output BEFORE Created thread.update(cx, |thread, cx| { @@ -2472,11 +2545,7 @@ mod tests { thread.on_terminal_provider_event( TerminalProviderEvent::Exit { terminal_id: terminal_id.clone(), - status: acp::TerminalExitStatus { - exit_code: Some(0), - signal: None, - meta: None, - }, + status: acp::TerminalExitStatus::new().exit_code(0), }, cx, ); @@ -2533,15 +2602,7 @@ mod tests { // Test creating a new user message thread.update(cx, |thread, cx| { - thread.push_user_content_block( - None, - acp::ContentBlock::Text(acp::TextContent { - annotations: None, - text: "Hello, ".to_string(), - meta: None, - }), - cx, - ); + thread.push_user_content_block(None, "Hello, ".into(), cx); }); thread.update(cx, |thread, cx| { @@ -2557,15 +2618,7 @@ mod tests { // Test appending to existing user message let message_1_id = UserMessageId::new(); thread.update(cx, |thread, cx| { - thread.push_user_content_block( - Some(message_1_id.clone()), - acp::ContentBlock::Text(acp::TextContent { - annotations: None, - text: "world!".to_string(), - meta: None, - }), - cx, - ); + thread.push_user_content_block(Some(message_1_id.clone()), "world!".into(), cx); }); thread.update(cx, |thread, cx| { @@ -2580,26 +2633,14 @@ mod tests { // Test creating new user message after assistant message thread.update(cx, 
|thread, cx| { - thread.push_assistant_content_block( - acp::ContentBlock::Text(acp::TextContent { - annotations: None, - text: "Assistant response".to_string(), - meta: None, - }), - false, - cx, - ); + thread.push_assistant_content_block("Assistant response".into(), false, cx); }); let message_2_id = UserMessageId::new(); thread.update(cx, |thread, cx| { thread.push_user_content_block( Some(message_2_id.clone()), - acp::ContentBlock::Text(acp::TextContent { - annotations: None, - text: "New user message".to_string(), - meta: None, - }), + "New user message".into(), cx, ); }); @@ -2627,27 +2668,22 @@ mod tests { thread.update(&mut cx, |thread, cx| { thread .handle_session_update( - acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { - content: "Thinking ".into(), - meta: None, - }), + acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk::new( + "Thinking ".into(), + )), cx, ) .unwrap(); thread .handle_session_update( - acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { - content: "hard!".into(), - meta: None, - }), + acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk::new( + "hard!".into(), + )), cx, ) .unwrap(); })?; - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() }, @@ -2715,10 +2751,7 @@ mod tests { .unwrap() .await .unwrap(); - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() }, @@ -2940,7 +2973,7 @@ mod tests { .await .unwrap_err(); - assert_eq!(err.code, acp::ErrorCode::RESOURCE_NOT_FOUND.code); + assert_eq!(err.code, acp::ErrorCode::ResourceNotFound); } #[gpui::test] @@ -2949,7 +2982,7 @@ mod tests { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let id = acp::ToolCallId("test".into()); + let id = acp::ToolCallId::new("test"); let connection = Rc::new(FakeAgentConnection::new().on_user_message({ let id = id.clone(); @@ -2959,26 +2992,17 @@ mod tests { thread .update(&mut cx, |thread, cx| { thread.handle_session_update( - acp::SessionUpdate::ToolCall(acp::ToolCall { - id: id.clone(), - title: "Label".into(), - kind: acp::ToolKind::Fetch, - status: acp::ToolCallStatus::InProgress, - content: vec![], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - }), + acp::SessionUpdate::ToolCall( + acp::ToolCall::new(id.clone(), "Label") + .kind(acp::ToolKind::Fetch) + .status(acp::ToolCallStatus::InProgress), + ), cx, ) }) .unwrap() .unwrap(); - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() } @@ -3020,14 +3044,10 @@ mod tests { thread .update(cx, |thread, cx| { thread.handle_session_update( - acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate { + acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate::new( id, - fields: acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::Completed), - ..Default::default() - }, - meta: None, - }), + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::Completed), + )), cx, ) }) @@ -3059,33 +3079,21 @@ mod tests { thread .update(&mut cx, |thread, cx| { thread.handle_session_update( - acp::SessionUpdate::ToolCall(acp::ToolCall { - id: acp::ToolCallId("test".into()), - title: "Label".into(), - kind: acp::ToolKind::Edit, - status: acp::ToolCallStatus::Completed, - content: vec![acp::ToolCallContent::Diff { - diff: acp::Diff { - 
path: "/test/test.txt".into(), - old_text: None, - new_text: "foo".into(), - meta: None, - }, - }], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - }), + acp::SessionUpdate::ToolCall( + acp::ToolCall::new("test", "Label") + .kind(acp::ToolKind::Edit) + .status(acp::ToolCallStatus::Completed) + .content(vec![acp::ToolCallContent::Diff(acp::Diff::new( + "/test/test.txt", + "foo", + ))]), + ), cx, ) }) .unwrap() .unwrap(); - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() } @@ -3138,18 +3146,14 @@ mod tests { thread.update(&mut cx, |thread, cx| { thread .handle_session_update( - acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { - content: content.text.to_uppercase().into(), - meta: None, - }), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new( + content.text.to_uppercase().into(), + )), cx, ) .unwrap(); })?; - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() } @@ -3305,34 +3309,22 @@ mod tests { thread.update(&mut cx, |thread, cx| { thread .handle_session_update( - acp::SessionUpdate::ToolCall(acp::ToolCall { - id: acp::ToolCallId("tool1".into()), - title: "Test Tool".into(), - kind: acp::ToolKind::Fetch, - status: acp::ToolCallStatus::Completed, - content: vec![], - locations: vec![], - raw_input: Some(serde_json::json!({"query": "test"})), - raw_output: Some( - serde_json::json!({"result": "inappropriate content"}), - ), - meta: None, - }), + acp::SessionUpdate::ToolCall( + acp::ToolCall::new("tool1", "Test Tool") + .kind(acp::ToolKind::Fetch) + .status(acp::ToolCallStatus::Completed) + .raw_input(serde_json::json!({"query": "test"})) + .raw_output(serde_json::json!({"result": "inappropriate content"})), + ), cx, ) .unwrap(); })?; // Now return refusal because of the tool result - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Refusal, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::Refusal)) } else { - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } } .boxed_local() @@ -3360,16 +3352,7 @@ mod tests { }); // Send a user message - this will trigger tool call and then refusal - let send_task = thread.update(cx, |thread, cx| { - thread.send( - vec![acp::ContentBlock::Text(acp::TextContent { - text: "Hello".into(), - annotations: None, - meta: None, - })], - cx, - ) - }); + let send_task = thread.update(cx, |thread, cx| thread.send(vec!["Hello".into()], cx)); cx.background_executor.spawn(send_task).detach(); cx.run_until_parked(); @@ -3415,21 +3398,11 @@ mod tests { let refuse_next = refuse_next.clone(); move |_request, _thread, _cx| { if refuse_next.load(SeqCst) { - async move { - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Refusal, - meta: None, - }) - } - .boxed_local() + async move { Ok(acp::PromptResponse::new(acp::StopReason::Refusal)) } + .boxed_local() } else { - async move { - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) - } - .boxed_local() + async move { Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } + .boxed_local() } } })); @@ -3486,10 +3459,7 @@ mod tests { let refuse_next = refuse_next.clone(); async move { if refuse_next.load(SeqCst) { - return Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Refusal, - meta: None, - }); + 
return Ok(acp::PromptResponse::new(acp::StopReason::Refusal)); } let acp::ContentBlock::Text(content) = &request.prompt[0] else { @@ -3498,18 +3468,14 @@ mod tests { thread.update(&mut cx, |thread, cx| { thread .handle_session_update( - acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { - content: content.text.to_uppercase().into(), - meta: None, - }), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new( + content.text.to_uppercase().into(), + )), cx, ) .unwrap(); })?; - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) } .boxed_local() } @@ -3634,8 +3600,8 @@ mod tests { } impl AgentConnection for FakeAgentConnection { - fn telemetry_id(&self) -> &'static str { - "fake" + fn telemetry_id(&self) -> SharedString { + "fake".into() } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -3648,13 +3614,12 @@ mod tests { _cwd: &Path, cx: &mut App, ) -> Task>> { - let session_id = acp::SessionId( + let session_id = acp::SessionId::new( rand::rng() .sample_iter(&distr::Alphanumeric) .take(7) .map(char::from) - .collect::() - .into(), + .collect::(), ); let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread = cx.new(|cx| { @@ -3664,12 +3629,12 @@ mod tests { project, action_log, session_id.clone(), - watch::Receiver::constant(acp::PromptCapabilities { - image: true, - audio: true, - embedded_context: true, - meta: None, - }), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), cx, ) }); @@ -3698,10 +3663,7 @@ mod tests { let thread = thread.clone(); cx.spawn(async move |cx| handler(params, thread, cx.clone()).await) } else { - Task::ready(Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - })) + Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))) } } @@ -3756,17 +3718,13 @@ mod tests { .unwrap(); // Try to update a tool call that doesn't exist - let nonexistent_id = acp::ToolCallId("nonexistent-tool-call".into()); + let nonexistent_id = acp::ToolCallId::new("nonexistent-tool-call"); thread.update(cx, |thread, cx| { let result = thread.handle_session_update( - acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate { - id: nonexistent_id.clone(), - fields: acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::Completed), - ..Default::default() - }, - meta: None, - }), + acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate::new( + nonexistent_id.clone(), + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::Completed), + )), cx, ); @@ -3803,4 +3761,300 @@ mod tests { } }); } + + /// Tests that restoring a checkpoint properly cleans up terminals that were + /// created after that checkpoint, and cancels any in-progress generation. + /// + /// Reproduces issue #35142: When a checkpoint is restored, any terminal processes + /// that were started after that checkpoint should be terminated, and any in-progress + /// AI generation should be canceled. 
+ #[gpui::test] + async fn test_restore_checkpoint_kills_terminal(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let connection = Rc::new(FakeAgentConnection::new()); + let thread = cx + .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx)) + .await + .unwrap(); + + // Send first user message to create a checkpoint + cx.update(|cx| { + thread.update(cx, |thread, cx| { + thread.send(vec!["first message".into()], cx) + }) + }) + .await + .unwrap(); + + // Send second message (creates another checkpoint) - we'll restore to this one + cx.update(|cx| { + thread.update(cx, |thread, cx| { + thread.send(vec!["second message".into()], cx) + }) + }) + .await + .unwrap(); + + // Create 2 terminals BEFORE the checkpoint that have completed running + let terminal_id_1 = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); + let mock_terminal_1 = cx.new(|cx| { + let builder = ::terminal::TerminalBuilder::new_display_only( + ::terminal::terminal_settings::CursorShape::default(), + ::terminal::terminal_settings::AlternateScroll::On, + None, + 0, + ) + .unwrap(); + builder.subscribe(cx) + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Created { + terminal_id: terminal_id_1.clone(), + label: "echo 'first'".to_string(), + cwd: Some(PathBuf::from("/test")), + output_byte_limit: None, + terminal: mock_terminal_1.clone(), + }, + cx, + ); + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Output { + terminal_id: terminal_id_1.clone(), + data: b"first\n".to_vec(), + }, + cx, + ); + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Exit { + terminal_id: terminal_id_1.clone(), + status: acp::TerminalExitStatus::new().exit_code(0), + }, + cx, + ); + }); + + let terminal_id_2 = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); + let mock_terminal_2 = cx.new(|cx| { + let builder = ::terminal::TerminalBuilder::new_display_only( + ::terminal::terminal_settings::CursorShape::default(), + ::terminal::terminal_settings::AlternateScroll::On, + None, + 0, + ) + .unwrap(); + builder.subscribe(cx) + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Created { + terminal_id: terminal_id_2.clone(), + label: "echo 'second'".to_string(), + cwd: Some(PathBuf::from("/test")), + output_byte_limit: None, + terminal: mock_terminal_2.clone(), + }, + cx, + ); + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Output { + terminal_id: terminal_id_2.clone(), + data: b"second\n".to_vec(), + }, + cx, + ); + }); + + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Exit { + terminal_id: terminal_id_2.clone(), + status: acp::TerminalExitStatus::new().exit_code(0), + }, + cx, + ); + }); + + // Get the second message ID to restore to + let second_message_id = thread.read_with(cx, |thread, _| { + // At this point we have: + // - Index 0: First user message (with checkpoint) + // - Index 1: Second user message (with checkpoint) + // No assistant responses because FakeAgentConnection just returns EndTurn + let AgentThreadEntry::UserMessage(message) = &thread.entries[1] else { + panic!("expected user message at index 1"); + }; + message.id.clone().unwrap() + }); + + // Create a terminal AFTER the checkpoint we'll restore to. 
+ // This simulates the AI agent starting a long-running terminal command. + let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); + let mock_terminal = cx.new(|cx| { + let builder = ::terminal::TerminalBuilder::new_display_only( + ::terminal::terminal_settings::CursorShape::default(), + ::terminal::terminal_settings::AlternateScroll::On, + None, + 0, + ) + .unwrap(); + builder.subscribe(cx) + }); + + // Register the terminal as created + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Created { + terminal_id: terminal_id.clone(), + label: "sleep 1000".to_string(), + cwd: Some(PathBuf::from("/test")), + output_byte_limit: None, + terminal: mock_terminal.clone(), + }, + cx, + ); + }); + + // Simulate the terminal producing output (still running) + thread.update(cx, |thread, cx| { + thread.on_terminal_provider_event( + TerminalProviderEvent::Output { + terminal_id: terminal_id.clone(), + data: b"terminal is running...\n".to_vec(), + }, + cx, + ); + }); + + // Create a tool call entry that references this terminal + // This represents the agent requesting a terminal command + thread.update(cx, |thread, cx| { + thread + .handle_session_update( + acp::SessionUpdate::ToolCall( + acp::ToolCall::new("terminal-tool-1", "Running command") + .kind(acp::ToolKind::Execute) + .status(acp::ToolCallStatus::InProgress) + .content(vec![acp::ToolCallContent::Terminal(acp::Terminal::new( + terminal_id.clone(), + ))]) + .raw_input(serde_json::json!({"command": "sleep 1000", "cd": "/test"})), + ), + cx, + ) + .unwrap(); + }); + + // Verify terminal exists and is in the thread + let terminal_exists_before = + thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id)); + assert!( + terminal_exists_before, + "Terminal should exist before checkpoint restore" + ); + + // Verify the terminal's underlying task is still running (not completed) + let terminal_running_before = thread.read_with(cx, |thread, _cx| { + let terminal_entity = thread.terminals.get(&terminal_id).unwrap(); + terminal_entity.read_with(cx, |term, _cx| { + term.output().is_none() // output is None means it's still running + }) + }); + assert!( + terminal_running_before, + "Terminal should be running before checkpoint restore" + ); + + // Verify we have the expected entries before restore + let entry_count_before = thread.read_with(cx, |thread, _| thread.entries.len()); + assert!( + entry_count_before > 1, + "Should have multiple entries before restore" + ); + + // Restore the checkpoint to the second message. + // This should: + // 1. Cancel any in-progress generation (via the cancel() call) + // 2. 
Remove the terminal that was created after that point + thread + .update(cx, |thread, cx| { + thread.restore_checkpoint(second_message_id, cx) + }) + .await + .unwrap(); + + // Verify that no send_task is in progress after restore + // (cancel() clears the send_task) + let has_send_task_after = thread.read_with(cx, |thread, _| thread.send_task.is_some()); + assert!( + !has_send_task_after, + "Should not have a send_task after restore (cancel should have cleared it)" + ); + + // Verify the entries were truncated (restoring to index 1 truncates at 1, keeping only index 0) + let entry_count = thread.read_with(cx, |thread, _| thread.entries.len()); + assert_eq!( + entry_count, 1, + "Should have 1 entry after restore (only the first user message)" + ); + + // Verify the 2 completed terminals from before the checkpoint still exist + let terminal_1_exists = thread.read_with(cx, |thread, _| { + thread.terminals.contains_key(&terminal_id_1) + }); + assert!( + terminal_1_exists, + "Terminal 1 (from before checkpoint) should still exist" + ); + + let terminal_2_exists = thread.read_with(cx, |thread, _| { + thread.terminals.contains_key(&terminal_id_2) + }); + assert!( + terminal_2_exists, + "Terminal 2 (from before checkpoint) should still exist" + ); + + // Verify they're still in completed state + let terminal_1_completed = thread.read_with(cx, |thread, _cx| { + let terminal_entity = thread.terminals.get(&terminal_id_1).unwrap(); + terminal_entity.read_with(cx, |term, _cx| term.output().is_some()) + }); + assert!(terminal_1_completed, "Terminal 1 should still be completed"); + + let terminal_2_completed = thread.read_with(cx, |thread, _cx| { + let terminal_entity = thread.terminals.get(&terminal_id_2).unwrap(); + terminal_entity.read_with(cx, |term, _cx| term.output().is_some()) + }); + assert!(terminal_2_completed, "Terminal 2 should still be completed"); + + // Verify the running terminal (created after checkpoint) was removed + let terminal_3_exists = + thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id)); + assert!( + !terminal_3_exists, + "Terminal 3 (created after checkpoint) should have been removed" + ); + + // Verify total count is 2 (the two from before the checkpoint) + let terminal_count = thread.read_with(cx, |thread, _| thread.terminals.len()); + assert_eq!( + terminal_count, 2, + "Should have exactly 2 terminals (the completed ones from before checkpoint)" + ); + } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 63ca65f22725c54476048542c90f5f5efcfd23ca..a670ba601159ec323ad2c88695c30bf4aeae4118 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -20,7 +20,7 @@ impl UserMessageId { } pub trait AgentConnection { - fn telemetry_id(&self) -> &'static str; + fn telemetry_id(&self) -> SharedString; fn new_thread( self: Rc, @@ -197,6 +197,17 @@ pub trait AgentModelSelector: 'static { fn watch(&self, _cx: &mut App) -> Option> { None } + + /// Returns whether the model picker should render a footer. + fn should_render_footer(&self) -> bool { + false + } + + /// Whether this selector supports the favorites feature. + /// Only the native agent uses the model ID format that maps to settings. 
+ fn supports_favorites(&self) -> bool { + false + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -234,6 +245,10 @@ impl AgentModelList { AgentModelList::Grouped(groups) => groups.is_empty(), } } + + pub fn is_flat(&self) -> bool { + matches!(self, AgentModelList::Flat(_)) + } } #[cfg(feature = "test-support")] @@ -317,8 +332,8 @@ mod test_support { } impl AgentConnection for StubAgentConnection { - fn telemetry_id(&self) -> &'static str { - "stub" + fn telemetry_id(&self) -> SharedString { + "stub".into() } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -331,7 +346,7 @@ mod test_support { _cwd: &Path, cx: &mut gpui::App, ) -> Task>> { - let session_id = acp::SessionId(self.sessions.lock().len().to_string().into()); + let session_id = acp::SessionId::new(self.sessions.lock().len().to_string()); let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread = cx.new(|cx| { AcpThread::new( @@ -340,12 +355,12 @@ mod test_support { project, action_log, session_id.clone(), - watch::Receiver::constant(acp::PromptCapabilities { - image: true, - audio: true, - embedded_context: true, - meta: None, - }), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), cx, ) }); @@ -384,10 +399,7 @@ mod test_support { response_tx.replace(tx); cx.spawn(async move |_| { let stop_reason = rx.await?; - Ok(acp::PromptResponse { - stop_reason, - meta: None, - }) + Ok(acp::PromptResponse::new(stop_reason)) }) } else { for update in self.next_prompt_updates.lock().drain(..) { @@ -395,7 +407,7 @@ mod test_support { let update = update.clone(); let permission_request = if let acp::SessionUpdate::ToolCall(tool_call) = &update - && let Some(options) = self.permission_requests.get(&tool_call.id) + && let Some(options) = self.permission_requests.get(&tool_call.tool_call_id) { Some((tool_call.clone(), options.clone())) } else { @@ -424,10 +436,7 @@ mod test_support { cx.spawn(async move |_| { try_join_all(tasks).await?; - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) }) } } diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 055b2f7fb86ffe9d7f12459b6b16405ce77815a0..cae1aad90810c217324659d29c065af443494933 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -50,9 +50,14 @@ impl Diff { let hunk_ranges = { let buffer = buffer.read(cx); let diff = diff.read(cx); - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) - .collect::>() + diff.hunks_intersecting_range( + Anchor::min_for_buffer(buffer.remote_id()) + ..Anchor::max_for_buffer(buffer.remote_id()), + buffer, + cx, + ) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) + .collect::>() }; multibuffer.set_excerpts_for_path( @@ -161,7 +166,7 @@ impl Diff { } pub fn has_revealed_range(&self, cx: &App) -> bool { - self.multibuffer().read(cx).excerpt_paths().next().is_some() + self.multibuffer().read(cx).paths().next().is_some() } pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool { @@ -316,7 +321,12 @@ impl PendingDiff { let buffer = self.new_buffer.read(cx); let diff = self.diff.read(cx); let mut ranges = diff - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) + .hunks_intersecting_range( + Anchor::min_for_buffer(buffer.remote_id()) + ..Anchor::max_for_buffer(buffer.remote_id()), + buffer, + cx, + ) .map(|diff_hunk| 
diff_hunk.buffer_range.to_point(buffer)) .collect::>(); ranges.extend( diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index b78eac4903a259a1044892fb2c8233f7e973f025..3e2e53fb7fbdf581b45566bd747cfcbfc1c0a004 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -4,12 +4,14 @@ use file_icons::FileIcons; use prompt_store::{PromptId, UserPromptId}; use serde::{Deserialize, Serialize}; use std::{ + borrow::Cow, fmt, ops::RangeInclusive, path::{Path, PathBuf}, }; use ui::{App, IconName, SharedString}; use url::Url; +use urlencoding::decode; use util::paths::PathStyle; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] @@ -74,11 +76,13 @@ impl MentionUri { let path = url.path(); match url.scheme() { "file" => { - let path = if path_style.is_windows() { + let normalized = if path_style.is_windows() { path.trim_start_matches("/") } else { path }; + let decoded = decode(normalized).unwrap_or(Cow::Borrowed(normalized)); + let path = decoded.as_ref(); if let Some(fragment) = url.fragment() { let line_range = parse_line_range(fragment)?; @@ -108,7 +112,7 @@ impl MentionUri { if let Some(thread_id) = path.strip_prefix("/agent/thread/") { let name = single_query_param(&url, "name")?.context("Missing thread name")?; Ok(Self::Thread { - id: acp::SessionId(thread_id.into()), + id: acp::SessionId::new(thread_id), name, }) } else if let Some(path) = path.strip_prefix("/agent/text-thread/") { @@ -406,6 +410,19 @@ mod tests { assert_eq!(parsed.to_uri().to_string(), selection_uri); } + #[test] + fn test_parse_file_uri_with_non_ascii() { + let file_uri = uri!("file:///path/to/%E6%97%A5%E6%9C%AC%E8%AA%9E.txt"); + let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap(); + match &parsed { + MentionUri::File { abs_path } => { + assert_eq!(abs_path, Path::new(path!("/path/to/日本語.txt"))); + } + _ => panic!("Expected File variant"), + } + assert_eq!(parsed.to_uri().to_string(), file_uri); + } + #[test] fn test_parse_untitled_selection_uri() { let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10"); diff --git a/crates/acp_thread/src/terminal.rs b/crates/acp_thread/src/terminal.rs index 8b08868616e19b0d1855558a057af8eebc314e4a..f70e044fbc1b380768dbcd807f1833f6fb5cd48b 100644 --- a/crates/acp_thread/src/terminal.rs +++ b/crates/acp_thread/src/terminal.rs @@ -75,11 +75,9 @@ impl Terminal { let exit_status = exit_status.map(portable_pty::ExitStatus::from); - acp::TerminalExitStatus { - exit_code: exit_status.as_ref().map(|e| e.exit_code()), - signal: exit_status.and_then(|e| e.signal().map(Into::into)), - meta: None, - } + acp::TerminalExitStatus::new() + .exit_code(exit_status.as_ref().map(|e| e.exit_code())) + .signal(exit_status.and_then(|e| e.signal().map(ToOwned::to_owned))) }) .shared(), } @@ -103,25 +101,19 @@ impl Terminal { if let Some(output) = self.output.as_ref() { let exit_status = output.exit_status.map(portable_pty::ExitStatus::from); - acp::TerminalOutputResponse { - output: output.content.clone(), - truncated: output.original_content_len > output.content.len(), - exit_status: Some(acp::TerminalExitStatus { - exit_code: exit_status.as_ref().map(|e| e.exit_code()), - signal: exit_status.and_then(|e| e.signal().map(Into::into)), - meta: None, - }), - meta: None, - } + acp::TerminalOutputResponse::new( + output.content.clone(), + output.original_content_len > output.content.len(), + ) + .exit_status( + acp::TerminalExitStatus::new() + .exit_code(exit_status.as_ref().map(|e| e.exit_code())) + 
.signal(exit_status.and_then(|e| e.signal().map(ToOwned::to_owned))), + ) } else { let (current_content, original_len) = self.truncated_output(cx); - - acp::TerminalOutputResponse { - truncated: current_content.len() < original_len, - output: current_content, - exit_status: None, - meta: None, - } + let truncated = current_content.len() < original_len; + acp::TerminalOutputResponse::new(current_content, truncated) } } @@ -195,8 +187,10 @@ pub async fn create_terminal_entity( Default::default() }; - // Disables paging for `git` and hopefully other commands + // Disable pagers so agent/terminal commands don't hang behind interactive UIs env.insert("PAGER".into(), "".into()); + // Override user core.pager (e.g. delta) which Git prefers over PAGER + env.insert("GIT_PAGER".into(), "cat".into()); env.extend(env_vars); // Use remote shell or default system shell, as appropriate diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index 7615784676c7d9ff1782a6e9537e608cb927154d..b0d30367da0634dc82f8db96fc099e268aa4790e 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -371,13 +371,13 @@ impl AcpTools { syntax: cx.theme().syntax().clone(), code_block_overflow_x_scroll: true, code_block: StyleRefinement { - text: Some(TextStyleRefinement { + text: TextStyleRefinement { font_family: Some( theme_settings.buffer_font.family.clone(), ), font_size: Some((base_size * 0.8).into()), ..Default::default() - }), + }, ..Default::default() }, ..Default::default() @@ -528,7 +528,7 @@ impl Render for AcpTools { .with_sizing_behavior(gpui::ListSizingBehavior::Auto) .size_full(), ) - .vertical_scrollbar_for(connection.list_state.clone(), window, cx) + .vertical_scrollbar_for(&connection.list_state, window, cx) .into_any() } } diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 78265007a5abe3e724166610013ade776d82dbeb..6eb18a4f12325f0c181928f99b4eb921265dbf9c 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -409,9 +409,11 @@ impl ActionLog { let new_diff_base = new_diff_base.clone(); async move { let mut unreviewed_edits = Patch::default(); - for hunk in diff_snapshot - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot) - { + for hunk in diff_snapshot.hunks_intersecting_range( + Anchor::min_for_buffer(buffer_snapshot.remote_id()) + ..Anchor::max_for_buffer(buffer_snapshot.remote_id()), + &buffer_snapshot, + ) { let old_range = new_diff_base .offset_to_point(hunk.diff_base_byte_range.start) ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end); @@ -732,12 +734,10 @@ impl ActionLog { cx: &mut Context, ) -> Task<()> { let futures = self.changed_buffers(cx).into_keys().map(|buffer| { - let reject = self.reject_edits_in_ranges( - buffer, - vec![Anchor::MIN..Anchor::MAX], - telemetry.clone(), - cx, - ); + let buffer_ranges = vec![Anchor::min_max_range_for_buffer( + buffer.read(cx).remote_id(), + )]; + let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx); async move { reject.await.log_err(); @@ -777,7 +777,7 @@ impl ActionLog { #[derive(Clone)] pub struct ActionLogTelemetry { - pub agent_telemetry_id: &'static str, + pub agent_telemetry_id: SharedString, pub session_id: Arc, } @@ -2010,7 +2010,8 @@ mod tests { // User accepts the single hunk action_log.update(cx, |log, cx| { - log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, None, cx) + let buffer_range = 
Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()); + log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx) }); cx.run_until_parked(); assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); @@ -2031,7 +2032,14 @@ mod tests { // User rejects the hunk action_log .update(cx, |log, cx| { - log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], None, cx) + log.reject_edits_in_ranges( + buffer.clone(), + vec![Anchor::min_max_range_for_buffer( + buffer.read(cx).remote_id(), + )], + None, + cx, + ) }) .await .unwrap(); diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 99ae5b5b077a14c0909737d64935220698a007c7..8587e52723b48d1495bbfbc5442bb8007aed1786 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -23,6 +23,7 @@ gpui.workspace = true language.workspace = true project.workspace = true proto.workspace = true +semver.workspace = true smallvec.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 5cb4e1c6153154782bf10447c13c3a9017cbcce7..b537fabc9b7102f0d9cfab42370a21983a941f19 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -925,15 +925,15 @@ impl StatusItemView for ActivityIndicator { #[cfg(test)] mod tests { - use gpui::SemanticVersion; use release_channel::AppCommitSha; + use semver::Version; use super::*; #[test] fn test_version_tooltip_message() { let message = ActivityIndicator::version_tooltip_message(&VersionCheckType::Semantic( - SemanticVersion::new(1, 0, 0), + Version::new(1, 0, 0), )); assert_eq!(message, "Version: 1.0.0"); diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index cacbbd6e4e4423e2560fb963ef59daddce2309dc..667033a1bb33ea0372b8a9d8b0bfb00b23f59347 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -83,6 +83,7 @@ ctor.workspace = true db = { workspace = true, "features" = ["test-support"] } editor = { workspace = true, "features" = ["test-support"] } env_logger.workspace = true +eval_utils.workspace = true fs = { workspace = true, "features" = ["test-support"] } git = { workspace = true, "features" = ["test-support"] } gpui = { workspace = true, "features" = ["test-support"] } diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index fc0b66f4073ea137f53b29286b0c17b53d11bf83..5e16f74682ef95a4e990ed5a124a0d6031acfb0e 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -5,12 +5,12 @@ mod legacy_thread; mod native_agent_server; pub mod outline; mod templates; -mod thread; -mod tools; - #[cfg(test)] mod tests; +mod thread; +mod tools; +use context_server::ContextServerId; pub use db::*; pub use history_store::*; pub use native_agent_server::NativeAgentServer; @@ -18,11 +18,11 @@ pub use templates::*; pub use thread::*; pub use tools::*; -use acp_thread::{AcpThread, AgentModelSelector}; +use acp_thread::{AcpThread, AgentModelSelector, UserMessageId}; use agent_client_protocol as acp; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; -use collections::{HashSet, IndexMap}; +use collections::{HashMap, HashSet, IndexMap}; use fs::Fs; use futures::channel::{mpsc, oneshot}; use futures::future::Shared; @@ -33,12 +33,12 @@ use gpui::{ use language_model::{LanguageModel, LanguageModelProvider, LanguageModelRegistry}; use project::{Project, ProjectItem, ProjectPath, 
Worktree}; use prompt_store::{ - ProjectContext, PromptStore, RulesFileContext, UserRulesContext, WorktreeContext, + ProjectContext, PromptStore, RULES_FILE_NAMES, RulesFileContext, UserRulesContext, + WorktreeContext, }; use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, update_settings_file}; use std::any::Any; -use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::Arc; @@ -51,18 +51,6 @@ pub struct ProjectSnapshot { pub timestamp: DateTime, } -const RULES_FILE_NAMES: [&str; 9] = [ - ".rules", - ".cursorrules", - ".windsurfrules", - ".clinerules", - ".github/copilot-instructions.md", - "CLAUDE.md", - "AGENT.md", - "AGENTS.md", - "GEMINI.md", -]; - pub struct RulesLoadingError { pub message: SharedString, } @@ -133,9 +121,7 @@ impl LanguageModels { for model in provider.provided_models(cx) { let model_info = Self::map_language_model_to_info(&model, &provider); let model_id = model_info.id.clone(); - if !recommended_models.contains(&(model.provider_id(), model.id())) { - provider_models.push(model_info); - } + provider_models.push(model_info); models.insert(model_id, model); } if !provider_models.is_empty() { @@ -172,7 +158,7 @@ impl LanguageModels { } fn model_id(model: &Arc) -> acp::ModelId { - acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) + acp::ModelId::new(format!("{}/{}", model.provider_id().0, model.id().0)) } fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> { @@ -265,12 +251,24 @@ impl NativeAgent { .await; cx.new(|cx| { + let context_server_store = project.read(cx).context_server_store(); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); + let mut subscriptions = vec![ cx.subscribe(&project, Self::handle_project_event), cx.subscribe( &LanguageModelRegistry::global(cx), Self::handle_models_updated_event, ), + cx.subscribe( + &context_server_store, + Self::handle_context_server_store_updated, + ), + cx.subscribe( + &context_server_registry, + Self::handle_context_server_registry_event, + ), ]; if let Some(prompt_store) = prompt_store.as_ref() { subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) @@ -279,16 +277,14 @@ impl NativeAgent { let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = watch::channel(()); Self { - sessions: HashMap::new(), + sessions: HashMap::default(), history, project_context: cx.new(|_| project_context), project_context_needs_refresh: project_context_needs_refresh_tx, _maintain_project_context: cx.spawn(async move |this, cx| { Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await }), - context_server_registry: cx.new(|cx| { - ContextServerRegistry::new(project.read(cx).context_server_store(), cx) - }), + context_server_registry, templates, models: LanguageModels::new(cx), project, @@ -357,6 +353,9 @@ impl NativeAgent { pending_save: Task::ready(()), }, ); + + self.update_available_commands(cx); + acp_thread } @@ -427,10 +426,7 @@ impl NativeAgent { .into_iter() .flat_map(|(contents, prompt_metadata)| match contents { Ok(contents) => Some(UserRulesContext { - uuid: match prompt_metadata.id { - prompt_store::PromptId::User { uuid } => uuid, - prompt_store::PromptId::EditWorkflow => return None, - }, + uuid: prompt_metadata.id.user_id()?, title: prompt_metadata.title.map(|title| title.to_string()), contents, }), @@ -624,6 +620,99 @@ impl NativeAgent { } } + fn handle_context_server_store_updated( + &mut self, + 
_store: Entity, + _event: &project::context_server_store::Event, + cx: &mut Context, + ) { + self.update_available_commands(cx); + } + + fn handle_context_server_registry_event( + &mut self, + _registry: Entity, + event: &ContextServerRegistryEvent, + cx: &mut Context, + ) { + match event { + ContextServerRegistryEvent::ToolsChanged => {} + ContextServerRegistryEvent::PromptsChanged => { + self.update_available_commands(cx); + } + } + } + + fn update_available_commands(&self, cx: &mut Context) { + let available_commands = self.build_available_commands(cx); + for session in self.sessions.values() { + if let Some(acp_thread) = session.acp_thread.upgrade() { + acp_thread.update(cx, |thread, cx| { + thread + .handle_session_update( + acp::SessionUpdate::AvailableCommandsUpdate( + acp::AvailableCommandsUpdate::new(available_commands.clone()), + ), + cx, + ) + .log_err(); + }); + } + } + } + + fn build_available_commands(&self, cx: &App) -> Vec { + let registry = self.context_server_registry.read(cx); + + let mut prompt_name_counts: HashMap<&str, usize> = HashMap::default(); + for context_server_prompt in registry.prompts() { + *prompt_name_counts + .entry(context_server_prompt.prompt.name.as_str()) + .or_insert(0) += 1; + } + + registry + .prompts() + .flat_map(|context_server_prompt| { + let prompt = &context_server_prompt.prompt; + + let should_prefix = prompt_name_counts + .get(prompt.name.as_str()) + .copied() + .unwrap_or(0) + > 1; + + let name = if should_prefix { + format!("{}.{}", context_server_prompt.server_id, prompt.name) + } else { + prompt.name.clone() + }; + + let mut command = acp::AvailableCommand::new( + name, + prompt.description.clone().unwrap_or_default(), + ); + + match prompt.arguments.as_deref() { + Some([arg]) => { + let hint = format!("<{}>", arg.name); + + command = command.input(acp::AvailableCommandInput::Unstructured( + acp::UnstructuredCommandInput::new(hint), + )); + } + Some([]) | None => {} + Some(_) => { + // skip >1 argument commands since we don't support them yet + return None; + } + } + + Some(command) + }) + .collect() + } + pub fn load_thread( &mut self, id: acp::SessionId, @@ -722,6 +811,102 @@ impl NativeAgent { history.update(cx, |history, cx| history.reload(cx)).ok(); }); } + + fn send_mcp_prompt( + &self, + message_id: UserMessageId, + session_id: agent_client_protocol::SessionId, + prompt_name: String, + server_id: ContextServerId, + arguments: HashMap, + original_content: Vec, + cx: &mut Context, + ) -> Task> { + let server_store = self.context_server_registry.read(cx).server_store().clone(); + let path_style = self.project.read(cx).path_style(cx); + + cx.spawn(async move |this, cx| { + let prompt = + crate::get_prompt(&server_store, &server_id, &prompt_name, arguments, cx).await?; + + let (acp_thread, thread) = this.update(cx, |this, _cx| { + let session = this + .sessions + .get(&session_id) + .context("Failed to get session")?; + anyhow::Ok((session.acp_thread.clone(), session.thread.clone())) + })??; + + let mut last_is_user = true; + + thread.update(cx, |thread, cx| { + thread.push_acp_user_block( + message_id, + original_content.into_iter().skip(1), + path_style, + cx, + ); + })?; + + for message in prompt.messages { + let context_server::types::PromptMessage { role, content } = message; + let block = mcp_message_content_to_acp_content_block(content); + + match role { + context_server::types::Role::User => { + let id = acp_thread::UserMessageId::new(); + + acp_thread.update(cx, |acp_thread, cx| { + 
acp_thread.push_user_content_block_with_indent( + Some(id.clone()), + block.clone(), + true, + cx, + ); + anyhow::Ok(()) + })??; + + thread.update(cx, |thread, cx| { + thread.push_acp_user_block(id, [block], path_style, cx); + anyhow::Ok(()) + })??; + } + context_server::types::Role::Assistant => { + acp_thread.update(cx, |acp_thread, cx| { + acp_thread.push_assistant_content_block_with_indent( + block.clone(), + false, + true, + cx, + ); + anyhow::Ok(()) + })??; + + thread.update(cx, |thread, cx| { + thread.push_acp_agent_block(block, cx); + anyhow::Ok(()) + })??; + } + } + + last_is_user = role == context_server::types::Role::User; + } + + let response_stream = thread.update(cx, |thread, cx| { + if last_is_user { + thread.send_existing(cx) + } else { + // Resume if MCP prompt did not end with a user message + thread.resume(cx) + } + })??; + + cx.update(|cx| { + NativeAgentConnection::handle_thread_events(response_stream, acp_thread, cx) + })? + .await + }) + } } /// Wrapper struct that implements the AgentConnection trait @@ -791,28 +976,12 @@ impl NativeAgentConnection { } ThreadEvent::AgentText(text) => { acp_thread.update(cx, |thread, cx| { - thread.push_assistant_content_block( - acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - false, - cx, - ) + thread.push_assistant_content_block(text.into(), false, cx) })?; } ThreadEvent::AgentThinking(text) => { acp_thread.update(cx, |thread, cx| { - thread.push_assistant_content_block( - acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - true, - cx, - ) + thread.push_assistant_content_block(text.into(), true, cx) })?; } ThreadEvent::ToolCallAuthorization(ToolCallAuthorization { @@ -826,8 +995,9 @@ impl NativeAgentConnection { ) })??; cx.background_spawn(async move { - if let acp::RequestPermissionOutcome::Selected { option_id } = - outcome_task.await + if let acp::RequestPermissionOutcome::Selected( + acp::SelectedPermissionOutcome { option_id, .. }, + ) = outcome_task.await { response .send(option_id) @@ -854,10 +1024,7 @@ impl NativeAgentConnection { } ThreadEvent::Stop(stop_reason) => { log::debug!("Assistant message complete: {:?}", stop_reason); - return Ok(acp::PromptResponse { - stop_reason, - meta: None, - }); + return Ok(acp::PromptResponse::new(stop_reason)); } } } @@ -869,14 +1036,44 @@ impl NativeAgentConnection { } log::debug!("Response stream completed"); - anyhow::Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) + anyhow::Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) }) } } +struct Command<'a> { + prompt_name: &'a str, + arg_value: &'a str, + explicit_server_id: Option<&'a str>, +} + +impl<'a> Command<'a> { + fn parse(prompt: &'a [acp::ContentBlock]) -> Option { + let acp::ContentBlock::Text(text_content) = prompt.first()? 
else { + return None; + }; + let text = text_content.text.trim(); + let command = text.strip_prefix('/')?; + let (command, arg_value) = command + .split_once(char::is_whitespace) + .unwrap_or((command, "")); + + if let Some((server_id, prompt_name)) = command.split_once('.') { + Some(Self { + prompt_name, + arg_value, + explicit_server_id: Some(server_id), + }) + } else { + Some(Self { + prompt_name: command, + arg_value, + explicit_server_id: None, + }) + } + } +} + struct NativeAgentModelSelector { session_id: acp::SessionId, connection: NativeAgentConnection, @@ -963,11 +1160,19 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector { fn watch(&self, cx: &mut App) -> Option> { Some(self.connection.0.read(cx).models.watch()) } + + fn should_render_footer(&self) -> bool { + true + } + + fn supports_favorites(&self) -> bool { + true + } } impl acp_thread::AgentConnection for NativeAgentConnection { - fn telemetry_id(&self) -> &'static str { - "zed" + fn telemetry_id(&self) -> SharedString { + "zed".into() } fn new_thread( @@ -1038,6 +1243,47 @@ impl acp_thread::AgentConnection for NativeAgentConnection { let session_id = params.session_id.clone(); log::info!("Received prompt request for session: {}", session_id); log::debug!("Prompt blocks count: {}", params.prompt.len()); + + if let Some(parsed_command) = Command::parse(¶ms.prompt) { + let registry = self.0.read(cx).context_server_registry.read(cx); + + let explicit_server_id = parsed_command + .explicit_server_id + .map(|server_id| ContextServerId(server_id.into())); + + if let Some(prompt) = + registry.find_prompt(explicit_server_id.as_ref(), parsed_command.prompt_name) + { + let arguments = if !parsed_command.arg_value.is_empty() + && let Some(arg_name) = prompt + .prompt + .arguments + .as_ref() + .and_then(|args| args.first()) + .map(|arg| arg.name.clone()) + { + HashMap::from_iter([(arg_name, parsed_command.arg_value.to_string())]) + } else { + Default::default() + }; + + let prompt_name = prompt.prompt.name.clone(); + let server_id = prompt.server_id.clone(); + + return self.0.update(cx, |agent, cx| { + agent.send_mcp_prompt( + id, + session_id.clone(), + prompt_name, + server_id, + arguments, + params.prompt, + cx, + ) + }); + }; + }; + let path_style = self.0.read(cx).project.read(cx).path_style(cx); self.run_turn(session_id, cx, move |thread, cx| { @@ -1238,6 +1484,15 @@ impl TerminalHandle for AcpTerminalHandle { self.terminal .read_with(cx, |term, cx| term.current_output(cx)) } + + fn kill(&self, cx: &AsyncApp) -> Result<()> { + cx.update(|cx| { + self.terminal.update(cx, |terminal, cx| { + terminal.kill(cx); + }); + })?; + Ok(()) + } } #[cfg(test)] @@ -1372,7 +1627,7 @@ mod internal_tests { IndexMap::from_iter([( AgentModelGroupName("Fake".into()), vec![AgentModelInfo { - id: acp::ModelId("fake/fake".into()), + id: acp::ModelId::new("fake/fake"), name: "Fake".into(), description: None, icon: Some(ui::IconName::ZedAssistant), @@ -1433,7 +1688,7 @@ mod internal_tests { // Select a model let selector = connection.model_selector(&session_id).unwrap(); - let model_id = acp::ModelId("fake/fake".into()); + let model_id = acp::ModelId::new("fake/fake"); cx.update(|cx| selector.select_model(model_id.clone(), cx)) .await .unwrap(); @@ -1519,20 +1774,14 @@ mod internal_tests { thread.send( vec![ "What does ".into(), - acp::ContentBlock::ResourceLink(acp::ResourceLink { - name: "b.md".into(), - uri: MentionUri::File { + acp::ContentBlock::ResourceLink(acp::ResourceLink::new( + "b.md", + MentionUri::File { abs_path: 
path!("/a/b.md").into(), } .to_uri() .to_string(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }), + )), " mean?".into(), ], cx, @@ -1631,3 +1880,35 @@ mod internal_tests { }); } } + +fn mcp_message_content_to_acp_content_block( + content: context_server::types::MessageContent, +) -> acp::ContentBlock { + match content { + context_server::types::MessageContent::Text { + text, + annotations: _, + } => text.into(), + context_server::types::MessageContent::Image { + data, + mime_type, + annotations: _, + } => acp::ContentBlock::Image(acp::ImageContent::new(data, mime_type)), + context_server::types::MessageContent::Audio { + data, + mime_type, + annotations: _, + } => acp::ContentBlock::Audio(acp::AudioContent::new(data, mime_type)), + context_server::types::MessageContent::Resource { + resource, + annotations: _, + } => { + let mut link = + acp::ResourceLink::new(resource.uri.to_string(), resource.uri.to_string()); + if let Some(mime_type) = resource.mime_type { + link = link.mime_type(mime_type); + } + acp::ContentBlock::ResourceLink(link) + } + } +} diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index c72e20571e2761788157a5fd10df147c2b414e4a..7a88c5870574cae424bd1fff50f1d20cdb00fa44 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -150,6 +150,7 @@ impl DbThread { .unwrap_or_default(), input: tool_use.input, is_input_complete: true, + thought_signature: None, }, )); } @@ -181,6 +182,7 @@ impl DbThread { crate::Message::Agent(AgentMessage { content, tool_results, + reasoning_details: None, }) } language_model::Role::System => { @@ -364,7 +366,7 @@ impl ThreadsDatabase { for (id, summary, updated_at) in rows { threads.push(DbThreadMetadata { - id: acp::SessionId(id), + id: acp::SessionId::new(id), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), }); @@ -422,4 +424,20 @@ impl ThreadsDatabase { Ok(()) }) } + + pub fn delete_threads(&self) -> Task> { + let connection = self.connection.clone(); + + self.executor.spawn(async move { + let connection = connection.lock(); + + let mut delete = connection.exec_bound::<()>(indoc! {" + DELETE FROM threads + "})?; + + delete(())?; + + Ok(()) + }) + } } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 2ecf3429d46540ea309052e833c3e40ea2a53cb5..5ea04729a49afae944c5e7ca88ad67791e18b6f3 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -172,14 +172,14 @@ impl EditAgent { project.set_agent_location( Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX, + position: language::Anchor::max_for_buffer(buffer.read(cx).remote_id()), }), cx, ) }); output_events_tx .unbounded_send(EditAgentOutputEvent::Edited( - language::Anchor::MIN..language::Anchor::MAX, + Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), )) .ok(); })?; @@ -187,7 +187,7 @@ impl EditAgent { while let Some(event) = parse_rx.next().await { match event? 
{ CreateFileParserEvent::NewTextChunk { chunk } => { - cx.update(|cx| { + let buffer_id = cx.update(|cx| { buffer.update(cx, |buffer, cx| buffer.append(chunk, cx)); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); @@ -195,15 +195,18 @@ impl EditAgent { project.set_agent_location( Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX, + position: language::Anchor::max_for_buffer( + buffer.read(cx).remote_id(), + ), }), cx, ) }); + buffer.read(cx).remote_id() })?; output_events_tx .unbounded_send(EditAgentOutputEvent::Edited( - language::Anchor::MIN..language::Anchor::MAX, + Anchor::min_max_range_for_buffer(buffer_id), )) .ok(); } @@ -703,6 +706,7 @@ impl EditAgent { role: Role::User, content: vec![MessageContent::Text(prompt)], cache: false, + reasoning_details: None, }); // Include tools in the request so that we can take advantage of @@ -1199,7 +1203,9 @@ mod tests { project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX + position: language::Anchor::max_for_buffer( + cx.update(|cx| buffer.read(cx).remote_id()) + ), }) ); @@ -1217,7 +1223,9 @@ mod tests { project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX + position: language::Anchor::max_for_buffer( + cx.update(|cx| buffer.read(cx).remote_id()) + ), }) ); @@ -1235,7 +1243,9 @@ mod tests { project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX + position: language::Anchor::max_for_buffer( + cx.update(|cx| buffer.read(cx).remote_id()) + ), }) ); @@ -1253,7 +1263,9 @@ mod tests { project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX + position: language::Anchor::max_for_buffer( + cx.update(|cx| buffer.read(cx).remote_id()) + ), }) ); @@ -1268,7 +1280,9 @@ mod tests { project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::MAX + position: language::Anchor::max_for_buffer( + cx.update(|cx| buffer.read(cx).remote_id()) + ), }) ); } diff --git a/crates/agent/src/edit_agent/edit_parser.rs b/crates/agent/src/edit_agent/edit_parser.rs index 425bf93efff115d4daef380e3f82abcdb8c0746f..c1aa61e18d4a450bc1caef939a53f65aed529a82 100644 --- a/crates/agent/src/edit_agent/edit_parser.rs +++ b/crates/agent/src/edit_agent/edit_parser.rs @@ -15,12 +15,14 @@ const SEPARATOR_MARKER: &str = "======="; const REPLACE_MARKER: &str = ">>>>>>> REPLACE"; const SONNET_PARAMETER_INVOKE_1: &str = "\n"; const SONNET_PARAMETER_INVOKE_2: &str = ""; -const END_TAGS: [&str; 5] = [ +const SONNET_PARAMETER_INVOKE_3: &str = ""; +const END_TAGS: [&str; 6] = [ OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG, - SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call + SONNET_PARAMETER_INVOKE_1, // Remove these after switching to streaming tool call SONNET_PARAMETER_INVOKE_2, + SONNET_PARAMETER_INVOKE_3, ]; #[derive(Debug)] @@ -567,21 +569,29 @@ mod tests { parse_random_chunks( indoc! 
{" some textupdated text + more textupd "}, &mut parser, &mut rng ), - vec![Edit { - old_text: "some text".to_string(), - new_text: "updated text".to_string(), - line_hint: None, - },] + vec![ + Edit { + old_text: "some text".to_string(), + new_text: "updated text".to_string(), + line_hint: None, + }, + Edit { + old_text: "more text".to_string(), + new_text: "upd".to_string(), + line_hint: None, + }, + ] ); assert_eq!( parser.finish(), EditParserMetrics { - tags: 2, - mismatched_tags: 1 + tags: 4, + mismatched_tags: 2 } ); } diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 54aa6ae5c95022ee1ef022aed78d46533de356be..01c81e0103a2d3624c7e8eb9b9c587726fcc4876 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -4,7 +4,7 @@ use crate::{ }; use Role::*; use client::{Client, UserStore}; -use collections::HashMap; +use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind}; use fs::FakeFs; use futures::{FutureExt, future::LocalBoxFuture}; use gpui::{AppContext, TestAppContext, Timer}; @@ -20,16 +20,62 @@ use rand::prelude::*; use reqwest_client::ReqwestClient; use serde_json::json; use std::{ - cmp::Reverse, fmt::{self, Display}, - io::Write as _, path::Path, str::FromStr, - sync::mpsc, time::Duration, }; use util::path; +#[derive(Default, Clone, Debug)] +struct EditAgentOutputProcessor { + mismatched_tag_threshold: f32, + cumulative_tags: usize, + cumulative_mismatched_tags: usize, + eval_outputs: Vec>, +} + +fn mismatched_tag_threshold(mismatched_tag_threshold: f32) -> EditAgentOutputProcessor { + EditAgentOutputProcessor { + mismatched_tag_threshold, + cumulative_tags: 0, + cumulative_mismatched_tags: 0, + eval_outputs: Vec::new(), + } +} + +#[derive(Clone, Debug)] +struct EditEvalMetadata { + tags: usize, + mismatched_tags: usize, +} + +impl EvalOutputProcessor for EditAgentOutputProcessor { + type Metadata = EditEvalMetadata; + + fn process(&mut self, output: &EvalOutput) { + if matches!(output.outcome, OutcomeKind::Passed | OutcomeKind::Failed) { + self.cumulative_mismatched_tags += output.metadata.mismatched_tags; + self.cumulative_tags += output.metadata.tags; + self.eval_outputs.push(output.clone()); + } + } + + fn assert(&mut self) { + let mismatched_tag_ratio = + self.cumulative_mismatched_tags as f32 / self.cumulative_tags as f32; + if mismatched_tag_ratio > self.mismatched_tag_threshold { + for eval_output in &self.eval_outputs { + println!("{}", eval_output.data); + } + panic!( + "Too many mismatched tags: {:?}", + self.cumulative_mismatched_tags + ); + } + } +} + #[test] #[cfg_attr(not(feature = "unit-eval"), ignore)] fn eval_extract_handle_command_output() { @@ -55,22 +101,19 @@ fn eval_extract_handle_command_output() { include_str!("evals/fixtures/extract_handle_command_output/possible-07.diff"), ]; let edit_description = "Extract `handle_command_output` method from `run_git_blame`."; - eval( - 100, - 0.95, - 0.05, - EvalInput::from_conversation( + eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(formatdoc! {" - Read the `{input_file_path}` file and extract a method in - the final stanza of `run_git_blame` to deal with command failures, - call it `handle_command_output` and take the std::process::Output as the only parameter. - Do not document the method and do not add any comments. 
+ Read the `{input_file_path}` file and extract a method in + the final stanza of `run_git_blame` to deal with command failures, + call it `handle_command_output` and take the std::process::Output as the only parameter. + Do not document the method and do not add any comments. - Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`. - "})], + Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`. + "})], ), message( Assistant, @@ -102,9 +145,9 @@ fn eval_extract_handle_command_output() { ), ], Some(input_file_content.into()), - EvalAssertion::assert_diff_any(possible_diffs), - ), - ); + EvalAssertion::assert_diff_any(possible_diffs.clone()), + )) + }); } #[test] @@ -122,18 +165,16 @@ fn eval_delete_run_git_blame() { let input_file_content = include_str!("evals/fixtures/delete_run_git_blame/before.rs"); let output_file_content = include_str!("evals/fixtures/delete_run_git_blame/after.rs"); let edit_description = "Delete the `run_git_blame` function."; - eval( - 100, - 0.95, - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(formatdoc! {" - Read the `{input_file_path}` file and delete `run_git_blame`. Just that - one function, not its usages. - "})], + Read the `{input_file_path}` file and delete `run_git_blame`. Just that + one function, not its usages. + "})], ), message( Assistant, @@ -166,8 +207,8 @@ fn eval_delete_run_git_blame() { ], Some(input_file_content.into()), EvalAssertion::assert_eq(output_file_content), - ), - ); + )) + }); } #[test] @@ -185,18 +226,16 @@ fn eval_translate_doc_comments() { let input_file_path = "root/canvas.rs"; let input_file_content = include_str!("evals/fixtures/translate_doc_comments/before.rs"); let edit_description = "Translate all doc comments to Italian"; - eval( - 200, - 1., - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(200, 1., mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(formatdoc! {" - Read the {input_file_path} file and edit it (without overwriting it), - translating all the doc comments to italian. - "})], + Read the {input_file_path} file and edit it (without overwriting it), + translating all the doc comments to italian. + "})], ), message( Assistant, @@ -229,8 +268,8 @@ fn eval_translate_doc_comments() { ], Some(input_file_content.into()), EvalAssertion::judge_diff("Doc comments were translated to Italian"), - ), - ); + )) + }); } #[test] @@ -249,33 +288,31 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { let input_file_content = include_str!("evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs"); let edit_description = "Update compile_parser_to_wasm to use wasi-sdk instead of emscripten"; - eval( - 100, - 0.95, - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(formatdoc! {" - Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten. - Use `ureq` to download the SDK for the current platform and architecture. - Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir. - Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows) - that's inside of the archive. 
- Don't re-download the SDK if that executable already exists. - - Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}} - - Here are the available wasi-sdk assets: - - wasi-sdk-25.0-x86_64-macos.tar.gz - - wasi-sdk-25.0-arm64-macos.tar.gz - - wasi-sdk-25.0-x86_64-linux.tar.gz - - wasi-sdk-25.0-arm64-linux.tar.gz - - wasi-sdk-25.0-x86_64-linux.tar.gz - - wasi-sdk-25.0-arm64-linux.tar.gz - - wasi-sdk-25.0-x86_64-windows.tar.gz - "})], + Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten. + Use `ureq` to download the SDK for the current platform and architecture. + Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir. + Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows) + that's inside of the archive. + Don't re-download the SDK if that executable already exists. + + Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}} + + Here are the available wasi-sdk assets: + - wasi-sdk-25.0-x86_64-macos.tar.gz + - wasi-sdk-25.0-arm64-macos.tar.gz + - wasi-sdk-25.0-x86_64-linux.tar.gz + - wasi-sdk-25.0-arm64-linux.tar.gz + - wasi-sdk-25.0-x86_64-linux.tar.gz + - wasi-sdk-25.0-arm64-linux.tar.gz + - wasi-sdk-25.0-x86_64-windows.tar.gz + "})], ), message( Assistant, @@ -352,11 +389,11 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { ], Some(input_file_content.into()), EvalAssertion::judge_diff(indoc! {" - - The compile_parser_to_wasm method has been changed to use wasi-sdk - - ureq is used to download the SDK for current platform and architecture - "}), - ), - ); + - The compile_parser_to_wasm method has been changed to use wasi-sdk + - ureq is used to download the SDK for current platform and architecture + "}), + )) + }); } #[test] @@ -380,11 +417,8 @@ fn eval_disable_cursor_blinking() { include_str!("evals/fixtures/disable_cursor_blinking/possible-03.diff"), include_str!("evals/fixtures/disable_cursor_blinking/possible-04.diff"), ]; - eval( - 100, - 0.51, - 0.05, - EvalInput::from_conversation( + eval_utils::eval(100, 0.51, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message(User, [text("Let's research how to cursor blinking works.")]), message( @@ -421,10 +455,10 @@ fn eval_disable_cursor_blinking() { message( User, [text(indoc! {" - Comment out the lines that interact with the BlinkManager. - Keep the outer `update` blocks, but comments everything that's inside (including if statements). - Don't add additional comments. - "})], + Comment out the lines that interact with the BlinkManager. + Keep the outer `update` blocks, but comments everything that's inside (including if statements). + Don't add additional comments. + "})], ), message( Assistant, @@ -440,9 +474,9 @@ fn eval_disable_cursor_blinking() { ), ], Some(input_file_content.into()), - EvalAssertion::assert_diff_any(possible_diffs), - ), - ); + EvalAssertion::assert_diff_any(possible_diffs.clone()), + )) + }); } #[test] @@ -467,20 +501,16 @@ fn eval_from_pixels_constructor() { let input_file_path = "root/canvas.rs"; let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs"); let edit_description = "Implement from_pixels constructor and add tests."; - eval( - 100, - 0.95, - // For whatever reason, this eval produces more mismatched tags. - // Increasing for now, let's see if we can bring this down. 
- 0.25, - EvalInput::from_conversation( + + eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.25), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(indoc! {" - Introduce a new `from_pixels` constructor in Canvas and - also add tests for it in the same file. - "})], + Introduce a new `from_pixels` constructor in Canvas and + also add tests for it in the same file. + "})], ), message( Assistant, @@ -545,92 +575,92 @@ fn eval_from_pixels_constructor() { "tool_4", "grep", indoc! {" - Found 6 matches: + Found 6 matches: - ## Matches in font-kit/src/loaders/core_text.rs + ## Matches in font-kit/src/loaders/core_text.rs - ### mod test › L926-936 - ``` - mod test { - use super::Font; - use crate::properties::{Stretch, Weight}; + ### mod test › L926-936 + ``` + mod test { + use super::Font; + use crate::properties::{Stretch, Weight}; - #[cfg(feature = \"source\")] - use crate::source::SystemSource; + #[cfg(feature = \"source\")] + use crate::source::SystemSource; - static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\"; + static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\"; - #[cfg(feature = \"source\")] - #[test] - ``` + #[cfg(feature = \"source\")] + #[test] + ``` - 55 lines remaining in ancestor node. Read the file to see all. + 55 lines remaining in ancestor node. Read the file to see all. - ### mod test › L947-951 - ``` - } + ### mod test › L947-951 + ``` + } - #[test] - fn test_core_text_to_css_font_weight() { - // Exact matches - ``` + #[test] + fn test_core_text_to_css_font_weight() { + // Exact matches + ``` - ### mod test › L959-963 - ``` - } + ### mod test › L959-963 + ``` + } - #[test] - fn test_core_text_to_css_font_stretch() { - // Exact matches - ``` + #[test] + fn test_core_text_to_css_font_stretch() { + // Exact matches + ``` - ## Matches in font-kit/src/loaders/freetype.rs + ## Matches in font-kit/src/loaders/freetype.rs - ### mod test › L1238-1248 - ``` - mod test { - use crate::loaders::freetype::Font; + ### mod test › L1238-1248 + ``` + mod test { + use crate::loaders::freetype::Font; - static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\"; - static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\"; + static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\"; + static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\"; - #[test] - fn get_pcf_postscript_name() { - let font = Font::from_path(PCF_FONT_PATH, 0).unwrap(); - assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME); - } - ``` + #[test] + fn get_pcf_postscript_name() { + let font = Font::from_path(PCF_FONT_PATH, 0).unwrap(); + assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME); + } + ``` - 1 lines remaining in ancestor node. Read the file to see all. + 1 lines remaining in ancestor node. Read the file to see all. 
- ## Matches in font-kit/src/sources/core_text.rs + ## Matches in font-kit/src/sources/core_text.rs - ### mod test › L265-275 - ``` - mod test { - use crate::properties::{Stretch, Weight}; + ### mod test › L265-275 + ``` + mod test { + use crate::properties::{Stretch, Weight}; - #[test] - fn test_css_to_core_text_font_weight() { - // Exact matches - assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7); - assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0); - assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4); - assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8); + #[test] + fn test_css_to_core_text_font_weight() { + // Exact matches + assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7); + assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0); + assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4); + assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8); - ``` + ``` - 27 lines remaining in ancestor node. Read the file to see all. + 27 lines remaining in ancestor node. Read the file to see all. - ### mod test › L278-282 - ``` - } + ### mod test › L278-282 + ``` + } - #[test] - fn test_css_to_core_text_font_stretch() { - // Exact matches - ``` - "}, + #[test] + fn test_css_to_core_text_font_stretch() { + // Exact matches + ``` + "}, )], ), message( @@ -648,11 +678,11 @@ fn eval_from_pixels_constructor() { ], Some(input_file_content.into()), EvalAssertion::judge_diff(indoc! {" - - The diff contains a new `from_pixels` constructor - - The diff contains new tests for the `from_pixels` constructor - "}), - ), - ); + - The diff contains a new `from_pixels` constructor + - The diff contains new tests for the `from_pixels` constructor + "}), + )) + }); } #[test] @@ -670,11 +700,9 @@ fn eval_zode() { let input_file_path = "root/zode.py"; let input_content = None; let edit_description = "Create the main Zode CLI script"; - eval( - 50, - 1., - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(50, 1., mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]), message( @@ -733,7 +761,7 @@ fn eval_zode() { ], ), ], - input_content, + input_content.clone(), EvalAssertion::new(async move |sample, _, _cx| { let invalid_starts = [' ', '`', '\n']; let mut message = String::new(); @@ -758,8 +786,8 @@ fn eval_zode() { }) } }), - ), - ); + )) + }); } #[test] @@ -777,19 +805,17 @@ fn eval_add_overwrite_test() { let input_file_path = "root/action_log.rs"; let input_file_content = include_str!("evals/fixtures/add_overwrite_test/before.rs"); let edit_description = "Add a new test for overwriting a file in action_log.rs"; - eval( - 200, - 0.5, // TODO: make this eval better - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(200, 0.5, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message( User, [text(indoc! {" - Introduce a new test in `action_log.rs` to test overwriting a file. - That is, a file already exists, but we call `buffer_created` as if the file were new. - Take inspiration from all the other tests in the file. - "})], + Introduce a new test in `action_log.rs` to test overwriting a file. + That is, a file already exists, but we call `buffer_created` as if the file were new. + Take inspiration from all the other tests in the file. 
+ "})], ), message( Assistant, @@ -809,81 +835,81 @@ fn eval_add_overwrite_test() { "tool_1", "read_file", indoc! {" - pub struct ActionLog [L13-20] - tracked_buffers [L15] - edited_since_project_diagnostics_check [L17] - project [L19] - impl ActionLog [L22-498] - pub fn new [L24-30] - pub fn project [L32-34] - pub fn checked_project_diagnostics [L37-39] - pub fn has_edited_files_since_project_diagnostics_check [L42-44] - fn track_buffer_internal [L46-101] - fn handle_buffer_event [L103-116] - fn handle_buffer_edited [L118-123] - fn handle_buffer_file_changed [L125-158] - async fn maintain_diff [L160-264] - pub fn buffer_read [L267-269] - pub fn buffer_created [L272-276] - pub fn buffer_edited [L279-287] - pub fn will_delete_buffer [L289-304] - pub fn keep_edits_in_range [L306-364] - pub fn reject_edits_in_ranges [L366-459] - pub fn keep_all_edits [L461-473] - pub fn changed_buffers [L476-482] - pub fn stale_buffers [L485-497] - fn apply_non_conflicting_edits [L500-561] - fn diff_snapshots [L563-585] - fn point_to_row_edit [L587-614] - enum ChangeAuthor [L617-620] - User [L618] - Agent [L619] - enum TrackedBufferStatus [L623-627] - Created [L624] - Modified [L625] - Deleted [L626] - struct TrackedBuffer [L629-641] - buffer [L630] - base_text [L631] - unreviewed_changes [L632] - status [L633] - version [L634] - diff [L635] - snapshot [L636] - diff_update [L637] - _open_lsp_handle [L638] - _maintain_diff [L639] - _subscription [L640] - impl TrackedBuffer [L643-657] - fn has_changes [L644-650] - fn schedule_diff_update [L652-656] - pub struct ChangedBuffer [L659-661] - pub diff [L660] - mod tests [L664-1574] - fn init_logger [L678-682] - fn init_test [L684-691] - async fn test_keep_edits [L694-769] - async fn test_deletions [L772-854] - async fn test_overlapping_user_edits [L857-951] - async fn test_creating_files [L954-1010] - async fn test_deleting_files [L1013-1120] - async fn test_reject_edits [L1123-1255] - async fn test_reject_multiple_edits [L1258-1331] - async fn test_reject_deleted_file [L1334-1388] - async fn test_reject_created_file [L1391-1443] - async fn test_random_diffs [L1446-1535] - fn quiesce [L1510-1534] - struct HunkStatus [L1538-1542] - range [L1539] - diff_status [L1540] - old_text [L1541] - fn unreviewed_hunks [L1544-1573] - - Showing symbols 1-69 (total symbols: 69) - - Using the line numbers in this outline, you can call this tool again while specifying - the start_line and end_line fields to see the implementations of symbols in the outline. 
- "}, + pub struct ActionLog [L13-20] + tracked_buffers [L15] + edited_since_project_diagnostics_check [L17] + project [L19] + impl ActionLog [L22-498] + pub fn new [L24-30] + pub fn project [L32-34] + pub fn checked_project_diagnostics [L37-39] + pub fn has_edited_files_since_project_diagnostics_check [L42-44] + fn track_buffer_internal [L46-101] + fn handle_buffer_event [L103-116] + fn handle_buffer_edited [L118-123] + fn handle_buffer_file_changed [L125-158] + async fn maintain_diff [L160-264] + pub fn buffer_read [L267-269] + pub fn buffer_created [L272-276] + pub fn buffer_edited [L279-287] + pub fn will_delete_buffer [L289-304] + pub fn keep_edits_in_range [L306-364] + pub fn reject_edits_in_ranges [L366-459] + pub fn keep_all_edits [L461-473] + pub fn changed_buffers [L476-482] + pub fn stale_buffers [L485-497] + fn apply_non_conflicting_edits [L500-561] + fn diff_snapshots [L563-585] + fn point_to_row_edit [L587-614] + enum ChangeAuthor [L617-620] + User [L618] + Agent [L619] + enum TrackedBufferStatus [L623-627] + Created [L624] + Modified [L625] + Deleted [L626] + struct TrackedBuffer [L629-641] + buffer [L630] + base_text [L631] + unreviewed_changes [L632] + status [L633] + version [L634] + diff [L635] + snapshot [L636] + diff_update [L637] + _open_lsp_handle [L638] + _maintain_diff [L639] + _subscription [L640] + impl TrackedBuffer [L643-657] + fn has_changes [L644-650] + fn schedule_diff_update [L652-656] + pub struct ChangedBuffer [L659-661] + pub diff [L660] + mod tests [L664-1574] + fn init_logger [L678-682] + fn init_test [L684-691] + async fn test_keep_edits [L694-769] + async fn test_deletions [L772-854] + async fn test_overlapping_user_edits [L857-951] + async fn test_creating_files [L954-1010] + async fn test_deleting_files [L1013-1120] + async fn test_reject_edits [L1123-1255] + async fn test_reject_multiple_edits [L1258-1331] + async fn test_reject_deleted_file [L1334-1388] + async fn test_reject_created_file [L1391-1443] + async fn test_random_diffs [L1446-1535] + fn quiesce [L1510-1534] + struct HunkStatus [L1538-1542] + range [L1539] + diff_status [L1540] + old_text [L1541] + fn unreviewed_hunks [L1544-1573] + + Showing symbols 1-69 (total symbols: 69) + + Using the line numbers in this outline, you can call this tool again while specifying + the start_line and end_line fields to see the implementations of symbols in the outline. + "}, )], ), message( @@ -963,17 +989,17 @@ fn eval_add_overwrite_test() { Assistant, [ text(indoc! {" - Based on my understanding of the existing tests and code, I'll now create a new test for overwriting a file. In this test, we'll: + Based on my understanding of the existing tests and code, I'll now create a new test for overwriting a file. In this test, we'll: - 1. Create a file - 2. Open a buffer for it - 3. Track it in the action log - 4. Modify the buffer - 5. Call `buffer_created` again as if the file were new - 6. Check that the changes are properly tracked + 1. Create a file + 2. Open a buffer for it + 3. Track it in the action log + 4. Modify the buffer + 5. Call `buffer_created` again as if the file were new + 6. 
Check that the changes are properly tracked - Let's write the test: - "}), + Let's write the test: + "}), tool_use( "tool_5", "edit_file", @@ -990,8 +1016,8 @@ fn eval_add_overwrite_test() { EvalAssertion::judge_diff( "A new test for overwritten files was created, without changing any previous test", ), - ), - ); + )) + }); } #[test] @@ -1016,20 +1042,18 @@ fn eval_create_empty_file() { let input_file_content = None; let expected_output_content = String::new(); - eval( - 100, - 0.99, - 0.05, - EvalInput::from_conversation( + + eval_utils::eval(100, 0.99, mismatched_tag_threshold(0.05), move || { + run_eval(EvalInput::from_conversation( vec![ message(User, [text("Create a second empty todo file ")]), message( Assistant, [ text(formatdoc! {" - I'll help you create a second empty todo file. - First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one. - "}), + I'll help you create a second empty todo file. + First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one. + "}), tool_use( "toolu_01GAF8TtsgpjKxCr8fgQLDgR", "list_directory", @@ -1051,8 +1075,8 @@ fn eval_create_empty_file() { Assistant, [ text(formatdoc! {" - I can see there's already a `TODO` file in the `root` directory. Let me create a second empty todo file called `TODO3` in the same directory: - "}), + I can see there's already a `TODO` file in the `root` directory. Let me create a second empty todo file called `TODO3` in the same directory: + "}), tool_use( "toolu_01Tb3iQ9griqSYMmVuykQPWU", "edit_file", @@ -1065,12 +1089,12 @@ fn eval_create_empty_file() { ], ), ], - input_file_content, + input_file_content.clone(), // Bad behavior is to write something like // "I'll create an empty TODO3 file as requested." - EvalAssertion::assert_eq(expected_output_content), - ), - ); + EvalAssertion::assert_eq(expected_output_content.clone()), + )) + }); } fn message( @@ -1081,6 +1105,7 @@ fn message( role, content: contents.into_iter().collect(), cache: false, + reasoning_details: None, } } @@ -1108,6 +1133,7 @@ fn tool_use( raw_input: serde_json::to_string_pretty(&input).unwrap(), input: serde_json::to_value(input).unwrap(), is_input_complete: true, + thought_signature: None, }) } @@ -1267,6 +1293,7 @@ impl EvalAssertion { role: Role::User, content: vec![prompt.into()], cache: false, + reasoning_details: None, }], thinking_allowed: true, ..Default::default() @@ -1309,115 +1336,45 @@ impl EvalAssertion { } } -fn eval( - iterations: usize, - expected_pass_ratio: f32, - mismatched_tag_threshold: f32, - mut eval: EvalInput, -) { - let mut evaluated_count = 0; - let mut failed_count = 0; - report_progress(evaluated_count, failed_count, iterations); - - let (tx, rx) = mpsc::channel(); - - // Cache the last message in the conversation, and run one instance of the eval so that - // all the next ones are cached. 
- eval.conversation.last_mut().unwrap().cache = true; - run_eval(eval.clone(), tx.clone()); - - let executor = gpui::background_executor(); - let semaphore = Arc::new(smol::lock::Semaphore::new(32)); - for _ in 1..iterations { - let eval = eval.clone(); - let tx = tx.clone(); - let semaphore = semaphore.clone(); - executor - .spawn(async move { - let _guard = semaphore.acquire().await; - run_eval(eval, tx) - }) - .detach(); - } - drop(tx); - - let mut failed_evals = HashMap::default(); - let mut errored_evals = HashMap::default(); - let mut eval_outputs = Vec::new(); - let mut cumulative_parser_metrics = EditParserMetrics::default(); - while let Ok(output) = rx.recv() { - match output { - Ok(output) => { - cumulative_parser_metrics += output.sample.edit_output.parser_metrics.clone(); - eval_outputs.push(output.clone()); - if output.assertion.score < 80 { - failed_count += 1; - failed_evals - .entry(output.sample.text_after.clone()) - .or_insert(Vec::new()) - .push(output); - } - } - Err(error) => { - failed_count += 1; - *errored_evals.entry(format!("{:?}", error)).or_insert(0) += 1; - } - } - - evaluated_count += 1; - report_progress(evaluated_count, failed_count, iterations); - } - - let actual_pass_ratio = (iterations - failed_count) as f32 / iterations as f32; - println!("Actual pass ratio: {}\n", actual_pass_ratio); - if actual_pass_ratio < expected_pass_ratio { - let mut errored_evals = errored_evals.into_iter().collect::>(); - errored_evals.sort_by_key(|(_, count)| Reverse(*count)); - for (error, count) in errored_evals { - println!("Eval errored {} times. Error: {}", count, error); - } - - let mut failed_evals = failed_evals.into_iter().collect::>(); - failed_evals.sort_by_key(|(_, evals)| Reverse(evals.len())); - for (_buffer_output, failed_evals) in failed_evals { - let eval_output = failed_evals.first().unwrap(); - println!("Eval failed {} times", failed_evals.len()); - println!("{}", eval_output); - } - - panic!( - "Actual pass ratio: {}\nExpected pass ratio: {}", - actual_pass_ratio, expected_pass_ratio - ); - } - - let mismatched_tag_ratio = - cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32; - if mismatched_tag_ratio > mismatched_tag_threshold { - for eval_output in eval_outputs { - println!("{}", eval_output); - } - panic!("Too many mismatched tags: {:?}", cumulative_parser_metrics); - } -} - -fn run_eval(eval: EvalInput, tx: mpsc::Sender>) { +fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput { let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng()); let mut cx = TestAppContext::build(dispatcher, None); - let output = cx.executor().block_test(async { + let result = cx.executor().block_test(async { let test = EditAgentTest::new(&mut cx).await; test.eval(eval, &mut cx).await }); - tx.send(output).unwrap(); + cx.quit(); + match result { + Ok(output) => eval_utils::EvalOutput { + data: output.to_string(), + outcome: if output.assertion.score < 80 { + eval_utils::OutcomeKind::Failed + } else { + eval_utils::OutcomeKind::Passed + }, + metadata: EditEvalMetadata { + tags: output.sample.edit_output.parser_metrics.tags, + mismatched_tags: output.sample.edit_output.parser_metrics.mismatched_tags, + }, + }, + Err(e) => eval_utils::EvalOutput { + data: format!("{e:?}"), + outcome: eval_utils::OutcomeKind::Error, + metadata: EditEvalMetadata { + tags: 0, + mismatched_tags: 0, + }, + }, + } } #[derive(Clone)] -struct EvalOutput { +struct EditEvalOutput { sample: EvalSample, assertion: EvalAssertionOutcome, } -impl Display for 
EvalOutput { +impl Display for EditEvalOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Score: {:?}", self.assertion.score)?; if let Some(message) = self.assertion.message.as_ref() { @@ -1436,22 +1393,6 @@ impl Display for EvalOutput { } } -fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usize) { - let passed_count = evaluated_count - failed_count; - let passed_ratio = if evaluated_count == 0 { - 0.0 - } else { - passed_count as f64 / evaluated_count as f64 - }; - print!( - "\r\x1b[KEvaluated {}/{} ({:.2}% passed)", - evaluated_count, - iterations, - passed_ratio * 100.0 - ); - std::io::stdout().flush().unwrap(); -} - struct EditAgentTest { agent: EditAgent, project: Entity, @@ -1547,7 +1488,10 @@ impl EditAgentTest { }) } - async fn eval(&self, eval: EvalInput, cx: &mut TestAppContext) -> Result { + async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result { + // Make sure the last message in the conversation is cached. + eval.conversation.last_mut().unwrap().cache = true; + let path = self .project .read_with(cx, |project, cx| { @@ -1593,6 +1537,7 @@ impl EditAgentTest { role: Role::System, content: vec![MessageContent::Text(system_prompt)], cache: true, + reasoning_details: None, }] .into_iter() .chain(eval.conversation) @@ -1652,7 +1597,7 @@ impl EditAgentTest { .run(&sample, self.judge_model.clone(), cx) .await?; - Ok(EvalOutput { assertion, sample }) + Ok(EditEvalOutput { assertion, sample }) } } diff --git a/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md b/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md index 902e43857c3214cde68372f1c9ff5f8015528ae2..29755d441f7a4f74709c1ac414e2a9a73fe6ac21 100644 --- a/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md +++ b/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md @@ -2,12 +2,12 @@ - We're starting from a completely blank project - Like Aider/Claude Code you take the user's initial prompt and then call the LLM and perform tool calls in a loop until the ultimate goal is achieved. - Unlike Aider or Claude code, it's not intended to be interactive. Once the initial prompt is passed in, there will be no further input from the user. -- The system you will build must reach the stated goal just by performing too calls and calling the LLM +- The system you will build must reach the stated goal just by performing tool calls and calling the LLM - I want you to build this in python. Use the anthropic python sdk and the model context protocol sdk. Use a virtual env and pip to install dependencies - Follow the anthropic guidance on tool calls: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/overview - Use this Anthropic model: `claude-3-7-sonnet-20250219` - Use this Anthropic API Key: `sk-ant-api03-qweeryiofdjsncmxquywefidopsugus` -- One of the most important pieces to this is having good too calls. We will be using the tools provided by the Claude MCP server. You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool +- One of the most important pieces to this is having good tool calls. We will be using the tools provided by the Claude MCP server. 
You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool - The cli tool should be invocable via python zode.py file.md where file.md is any possible file that contains the users prompt. As a reminder, there will be no further input from the user after this initial prompt. Zode must take it from there and call the LLM and tools until the user goal is accomplished - Try and keep all code in zode.py and make heavy use of the asks I mentioned - Once you’ve implemented this, you must run python zode.py eval/instructions.md to see how well our new agent tool does! diff --git a/crates/agent/src/history_store.rs b/crates/agent/src/history_store.rs index 3bfbd99677feed5db53d96d2fa96316ac49abce4..c455f73316e3fc7a641fa8a31ac0ad766a2ae584 100644 --- a/crates/agent/src/history_store.rs +++ b/crates/agent/src/history_store.rs @@ -188,6 +188,15 @@ impl HistoryStore { }) } + pub fn delete_threads(&mut self, cx: &mut Context) -> Task> { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let database = database_future.await.map_err(|err| anyhow!(err))?; + database.delete_threads().await?; + this.update(cx, |this, cx| this.reload(cx)) + }) + } + pub fn delete_text_thread( &mut self, path: Arc, @@ -207,14 +216,10 @@ impl HistoryStore { } pub fn reload(&self, cx: &mut Context) { - let database_future = ThreadsDatabase::connect(cx); + let database_connection = ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { - let threads = database_future - .await - .map_err(|err| anyhow!(err))? - .list_threads() - .await?; - + let database = database_connection.await; + let threads = database.map_err(|err| anyhow!(err))?.list_threads().await?; this.update(cx, |this, cx| { if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES { for thread in threads @@ -335,7 +340,8 @@ impl HistoryStore { fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { cx.background_spawn(async move { if cfg!(any(feature = "test-support", test)) { - anyhow::bail!("history store does not persist in tests"); + log::warn!("history store does not persist in tests"); + return Ok(VecDeque::new()); } let json = KEY_VALUE_STORE .read_kvp(RECENTLY_OPENED_THREADS_KEY)? 
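A deliberately simplified, synchronous model of the `history_store.rs` changes above: `delete_threads` clears the backing database and reloads the in-memory list, and the recently-opened loader now logs and returns an empty list under a test configuration instead of erroring. All names below are illustrative stand-ins, not the real async, gpui-based implementation:

```rust
use std::collections::VecDeque;

#[derive(Default)]
struct HistoryModel {
    db_threads: Vec<String>,
    entries: Vec<String>,
    recently_opened: VecDeque<String>,
}

impl HistoryModel {
    fn reload(&mut self) {
        // The real reload awaits the database connection, then lists threads.
        self.entries = self.db_threads.clone();
    }

    fn delete_threads(&mut self) {
        self.db_threads.clear();
        self.reload();
    }

    fn load_recently_opened(&self) -> VecDeque<String> {
        // Mirrors the hunk above: warn and return an empty list in tests
        // instead of bailing. (The real check also looks at the
        // "test-support" cargo feature.)
        if cfg!(test) {
            eprintln!("history store does not persist in tests");
            return VecDeque::new();
        }
        self.recently_opened.clone()
    }
}

fn main() {
    let mut history = HistoryModel {
        db_threads: vec!["thread-1".into(), "thread-2".into()],
        ..Default::default()
    };
    history.reload();
    assert_eq!(history.entries.len(), 2);
    history.delete_threads();
    assert!(history.entries.is_empty());
    let _ = history.load_recently_opened();
}
```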
@@ -345,9 +351,9 @@ impl HistoryStore { .into_iter() .take(MAX_RECENTLY_OPENED_ENTRIES) .flat_map(|entry| match entry { - SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread( - acp::SessionId(id.as_str().into()), - )), + SerializedRecentOpen::AcpThread(id) => { + Some(HistoryEntryId::AcpThread(acp::SessionId::new(id.as_str()))) + } SerializedRecentOpen::TextThread(file_name) => Some( HistoryEntryId::TextThread(text_threads_dir().join(file_name).into()), ), diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index 4c78c5a3f85b6628f9784fe7ecbadc8531b017d0..a9ade8141a678329e0dd8dad9808e55eee3c382b 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -21,10 +21,6 @@ impl NativeAgentServer { } impl AgentServer for NativeAgentServer { - fn telemetry_id(&self) -> &'static str { - "zed" - } - fn name(&self) -> SharedString { "Zed Agent".into() } diff --git a/crates/agent/src/outline.rs b/crates/agent/src/outline.rs index 262fa8d3d139a5c8f5900d0dd55348f9dc716167..77af4849ffd19c1630331f5c755ff372cb69aeba 100644 --- a/crates/agent/src/outline.rs +++ b/crates/agent/src/outline.rs @@ -44,14 +44,31 @@ pub async fn get_buffer_content_or_outline( .collect::>() })?; + // If no outline exists, fall back to first 1KB so the agent has some context + if outline_items.is_empty() { + let text = buffer.read_with(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + let len = snapshot.len().min(snapshot.as_rope().floor_char_boundary(1024)); + let content = snapshot.text_for_range(0..len).collect::(); + if let Some(path) = path { + format!("# First 1KB of {path} (file too large to show full content, and no outline available)\n\n{content}") + } else { + format!("# First 1KB of file (file too large to show full content, and no outline available)\n\n{content}") + } + })?; + + return Ok(BufferContent { + text, + is_outline: false, + }); + } + let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?; let text = if let Some(path) = path { - format!( - "# File outline for {path} (file too large to show full content)\n\n{outline_text}", - ) + format!("# File outline for {path}\n\n{outline_text}",) } else { - format!("# File outline (file too large to show full content)\n\n{outline_text}",) + format!("# File outline\n\n{outline_text}",) }; Ok(BufferContent { text, @@ -140,3 +157,62 @@ fn render_entries( entries_rendered } + +#[cfg(test)] +mod tests { + use super::*; + use fs::FakeFs; + use gpui::TestAppContext; + use project::Project; + use settings::SettingsStore; + + #[gpui::test] + async fn test_large_file_fallback_to_subset(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings = SettingsStore::test(cx); + cx.set_global(settings); + }); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let content = "⚡".repeat(100 * 1024); // 100KB + let content_len = content.len(); + let buffer = project + .update(cx, |project, cx| project.create_buffer(true, cx)) + .await + .expect("failed to create buffer"); + + buffer.update(cx, |buffer, cx| buffer.set_text(content, cx)); + + let result = cx + .spawn(|cx| async move { get_buffer_content_or_outline(buffer, None, &cx).await }) + .await + .unwrap(); + + // Should contain some of the actual file content + assert!( + result.text.contains("⚡⚡⚡⚡⚡⚡⚡"), + "Result did not contain content subset" + ); + + // Should be marked as not an outline (it's truncated content) + assert!( + !result.is_outline, + "Large file 
without outline should not be marked as outline" + ); + + // Should be reasonably sized (much smaller than original) + assert!( + result.text.len() < 50 * 1024, + "Result size {} should be smaller than 50KB", + result.text.len() + ); + + // Should be significantly smaller than the original content + assert!( + result.text.len() < content_len / 10, + "Result should be much smaller than original content" + ); + } +} diff --git a/crates/agent/src/templates/system_prompt.hbs b/crates/agent/src/templates/system_prompt.hbs index 4620647135631fdb367b0dc2604e89770a938c07..2477e46a85183813f61bb60d7e3de7f119a4f00c 100644 --- a/crates/agent/src/templates/system_prompt.hbs +++ b/crates/agent/src/templates/system_prompt.hbs @@ -16,7 +16,7 @@ You are a highly skilled software engineer with extensive knowledge in many prog 3. DO NOT use tools to access items that are already available in the context section. 4. Use only the tools that are currently available. 5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off. -6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers. +6. When running commands that may run indefinitely or for a long time (such as build scripts, tests, servers, or file watchers), specify `timeout_ms` to bound runtime. If the command times out, the user can always ask you to run it again with a longer timeout or no timeout if they're willing to wait or cancel manually. 7. Avoid HTML entity escaping - use plain characters instead. ## Searching and Reading diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 5d4bdce27cc05d1cf46a4b73821f0a97878fd6f4..45028902e467fe67945ddf444c9ae417dcaed654 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -9,14 +9,16 @@ use collections::IndexMap; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use fs::{FakeFs, Fs}; use futures::{ - StreamExt, + FutureExt as _, StreamExt, channel::{ mpsc::{self, UnboundedReceiver}, oneshot, }, + future::{Fuse, Shared}, }; use gpui::{ - App, AppContext, Entity, Task, TestAppContext, UpdateGlobal, http_client::FakeHttpClient, + App, AppContext, AsyncApp, Entity, Task, TestAppContext, UpdateGlobal, + http_client::FakeHttpClient, }; use indoc::indoc; use language_model::{ @@ -35,12 +37,109 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::json; use settings::{Settings, SettingsStore}; -use std::{path::Path, rc::Rc, sync::Arc, time::Duration}; +use std::{ + path::Path, + pin::Pin, + rc::Rc, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, +}; use util::path; mod test_tools; use test_tools::*; +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); +} + +struct FakeTerminalHandle { + killed: Arc, + wait_for_exit: Shared>, + output: acp::TerminalOutputResponse, + id: acp::TerminalId, +} + +impl FakeTerminalHandle { + fn new_never_exits(cx: &mut App) -> Self { + let killed = Arc::new(AtomicBool::new(false)); + + let killed_for_task = killed.clone(); + let wait_for_exit = cx + .spawn(async move |cx| { + loop { + if killed_for_task.load(Ordering::SeqCst) { + return acp::TerminalExitStatus::new(); + } + cx.background_executor() + .timer(Duration::from_millis(1)) + .await; + } + }) + .shared(); + + Self { + killed, + wait_for_exit, + 
output: acp::TerminalOutputResponse::new("partial output".to_string(), false), + id: acp::TerminalId::new("fake_terminal".to_string()), + } + } + + fn was_killed(&self) -> bool { + self.killed.load(Ordering::SeqCst) + } +} + +impl crate::TerminalHandle for FakeTerminalHandle { + fn id(&self, _cx: &AsyncApp) -> Result { + Ok(self.id.clone()) + } + + fn current_output(&self, _cx: &AsyncApp) -> Result { + Ok(self.output.clone()) + } + + fn wait_for_exit(&self, _cx: &AsyncApp) -> Result>> { + Ok(self.wait_for_exit.clone()) + } + + fn kill(&self, _cx: &AsyncApp) -> Result<()> { + self.killed.store(true, Ordering::SeqCst); + Ok(()) + } +} + +struct FakeThreadEnvironment { + handle: Rc, +} + +impl crate::ThreadEnvironment for FakeThreadEnvironment { + fn create_terminal( + &self, + _command: String, + _cwd: Option, + _output_byte_limit: Option, + _cx: &mut AsyncApp, + ) -> Task>> { + Task::ready(Ok(self.handle.clone() as Rc)) + } +} + +fn always_allow_tools(cx: &mut TestAppContext) { + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.always_allow_tool_actions = true; + agent_settings::AgentSettings::override_global(settings, cx); + }); +} + #[gpui::test] async fn test_echo(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; @@ -71,6 +170,120 @@ async fn test_echo(cx: &mut TestAppContext) { assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } +#[gpui::test] +async fn test_terminal_tool_timeout_kills_handle(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let handle = Rc::new(cx.update(|cx| FakeTerminalHandle::new_never_exits(cx))); + let environment = Rc::new(FakeThreadEnvironment { + handle: handle.clone(), + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = Arc::new(crate::TerminalTool::new(project, environment)); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::TerminalToolInput { + command: "sleep 1000".to_string(), + cd: ".".to_string(), + timeout_ms: Some(5), + }, + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|c| matches!(c, acp::ToolCallContent::Terminal(_))) + }), + "expected tool call update to include terminal content" + ); + + let mut task_future: Pin>>>> = Box::pin(task.fuse()); + + let deadline = std::time::Instant::now() + Duration::from_millis(500); + loop { + if let Some(result) = task_future.as_mut().now_or_never() { + let result = result.expect("terminal tool task should complete"); + + assert!( + handle.was_killed(), + "expected terminal handle to be killed on timeout" + ); + assert!( + result.contains("partial output"), + "expected result to include terminal output, got: {result}" + ); + return; + } + + if std::time::Instant::now() >= deadline { + panic!("timed out waiting for terminal tool task to complete"); + } + + cx.run_until_parked(); + cx.background_executor.timer(Duration::from_millis(1)).await; + } +} + +#[gpui::test] +#[ignore] +async fn test_terminal_tool_without_timeout_does_not_kill_handle(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let handle = Rc::new(cx.update(|cx| FakeTerminalHandle::new_never_exits(cx))); + let environment 
= Rc::new(FakeThreadEnvironment { + handle: handle.clone(), + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = Arc::new(crate::TerminalTool::new(project, environment)); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let _task = cx.update(|cx| { + tool.run( + crate::TerminalToolInput { + command: "sleep 1000".to_string(), + cd: ".".to_string(), + timeout_ms: None, + }, + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|c| matches!(c, acp::ToolCallContent::Terminal(_))) + }), + "expected tool call update to include terminal content" + ); + + smol::Timer::after(Duration::from_millis(25)).await; + + assert!( + !handle.was_killed(), + "did not expect terminal handle to be killed without a timeout" + ); +} + #[gpui::test] async fn test_thinking(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; @@ -215,7 +428,8 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { vec![LanguageModelRequestMessage { role: Role::User, content: vec!["Message 1".into()], - cache: true + cache: true, + reasoning_details: None, }] ); fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text( @@ -239,17 +453,20 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["Message 1".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec!["Response to Message 1".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec!["Message 2".into()], - cache: true + cache: true, + reasoning_details: None, } ] ); @@ -274,6 +491,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { raw_input: json!({"text": "test"}).to_string(), input: json!({"text": "test"}), is_input_complete: true, + thought_signature: None, }; fake_model .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); @@ -294,37 +512,44 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["Message 1".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec!["Response to Message 1".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec!["Message 2".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec!["Response to Message 2".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec!["Use the echo tool".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec![MessageContent::ToolUse(tool_use)], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec![MessageContent::ToolResult(tool_result)], - cache: true + cache: true, + reasoning_details: None, } ] ); @@ -461,6 +686,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }, )); 
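For the two terminal-tool tests added earlier in this file (and the `timeout_ms` guidance in the system prompt template): a rough, std-only model of the behavior they pin down, where a set timeout races command exit against a timer, kills the terminal on expiry, and surfaces whatever partial output is available. Names and types are illustrative, not the real `TerminalTool`:

```rust
use std::sync::{
    Arc,
    atomic::{AtomicBool, Ordering},
    mpsc,
};
use std::thread;
use std::time::Duration;

struct FakeTerminal {
    killed: Arc<AtomicBool>,
    exited: mpsc::Receiver<i32>,
    partial_output: String,
}

fn run_command(terminal: &FakeTerminal, timeout_ms: Option<u64>) -> String {
    match timeout_ms {
        Some(ms) => match terminal.exited.recv_timeout(Duration::from_millis(ms)) {
            Ok(code) => format!("exited with {code}"),
            Err(_) => {
                // Timed out: kill the underlying process and report partial output.
                terminal.killed.store(true, Ordering::SeqCst);
                format!("timed out; partial output: {}", terminal.partial_output)
            }
        },
        // Without a timeout, just keep waiting for the command to exit.
        None => {
            let code = terminal.exited.recv().expect("terminal task dropped");
            format!("exited with {code}")
        }
    }
}

fn main() {
    let (exit_tx, exit_rx) = mpsc::channel();
    let terminal = FakeTerminal {
        killed: Arc::new(AtomicBool::new(false)),
        exited: exit_rx,
        partial_output: "partial output".to_string(),
    };
    // Simulate a command that never finishes within the timeout.
    thread::spawn(move || {
        thread::sleep(Duration::from_secs(60));
        let _ = exit_tx.send(0);
    });
    let result = run_command(&terminal, Some(5));
    assert!(terminal.killed.load(Ordering::SeqCst));
    assert!(result.contains("partial output"));
    println!("{result}");
}
```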
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( @@ -470,6 +696,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -479,14 +706,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { // Approve the first tool_call_auth_1 .response - .send(tool_call_auth_1.options[1].id.clone()) + .send(tool_call_auth_1.options[1].option_id.clone()) .unwrap(); cx.run_until_parked(); // Reject the second tool_call_auth_2 .response - .send(tool_call_auth_1.options[2].id.clone()) + .send(tool_call_auth_1.options[2].option_id.clone()) .unwrap(); cx.run_until_parked(); @@ -496,14 +723,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { message.content, vec![ language_model::MessageContent::ToolResult(LanguageModelToolResult { - tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(), + tool_use_id: tool_call_auth_1.tool_call.tool_call_id.0.to_string().into(), tool_name: ToolRequiringPermission::name().into(), is_error: false, content: "Allowed".into(), output: Some("Allowed".into()) }), language_model::MessageContent::ToolResult(LanguageModelToolResult { - tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(), + tool_use_id: tool_call_auth_2.tool_call.tool_call_id.0.to_string().into(), tool_name: ToolRequiringPermission::name().into(), is_error: true, content: "Permission to run tool denied by user".into(), @@ -520,6 +747,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -528,7 +756,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { let tool_call_auth_3 = next_tool_call_authorization(&mut events).await; tool_call_auth_3 .response - .send(tool_call_auth_3.options[0].id.clone()) + .send(tool_call_auth_3.options[0].option_id.clone()) .unwrap(); cx.run_until_parked(); let completion = fake_model.pending_completions().pop().unwrap(); @@ -537,7 +765,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { message.content, vec![language_model::MessageContent::ToolResult( LanguageModelToolResult { - tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(), + tool_use_id: tool_call_auth_3.tool_call.tool_call_id.0.to_string().into(), tool_name: ToolRequiringPermission::name().into(), is_error: false, content: "Allowed".into(), @@ -554,6 +782,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -592,6 +821,7 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -621,6 +851,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { raw_input: "{}".into(), input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), is_input_complete: true, + thought_signature: None, }; fake_model .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); @@ -641,25 +872,26 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["abc".into()], - cache: false + cache: false, + 
reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec![MessageContent::ToolUse(tool_use.clone())], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec![MessageContent::ToolResult(tool_result.clone())], - cache: true + cache: true, + reasoning_details: None, }, ] ); // Simulate reaching tool use limit. - fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate( - cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached, - )); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached); fake_model.end_last_completion_stream(); let last_event = events.collect::>().await.pop().unwrap(); assert!( @@ -677,22 +909,26 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["abc".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec![MessageContent::ToolUse(tool_use)], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec![MessageContent::ToolResult(tool_result)], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec!["Continue where you left off".into()], - cache: true + cache: true, + reasoning_details: None, } ] ); @@ -731,6 +967,7 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) { raw_input: "{}".into(), input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), is_input_complete: true, + thought_signature: None, }; let tool_result = LanguageModelToolResult { tool_use_id: "tool_id_1".into(), @@ -741,9 +978,7 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) { }; fake_model .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); - fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate( - cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached, - )); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached); fake_model.end_last_completion_stream(); let last_event = events.collect::>().await.pop().unwrap(); assert!( @@ -765,22 +1000,26 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["abc".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec![MessageContent::ToolUse(tool_use)], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec![MessageContent::ToolResult(tool_result)], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, content: vec!["ghi".into()], - cache: true + cache: true, + reasoning_details: None, } ] ); @@ -1037,6 +1276,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { raw_input: json!({"text": "test"}).to_string(), input: json!({"text": "test"}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -1080,6 +1320,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { raw_input: json!({"text": "mcp"}).to_string(), input: json!({"text": "mcp"}), is_input_complete: true, + thought_signature: None, }, )); 
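A minimal model of the tool-use-limit flow asserted in the two tests above: when the provider emits `ToolUseLimitReached`, the thread records it, and resuming appends a plain user message telling the model to continue (the same "Continue where you left off" text that `Message::Resume` produces further down in `thread.rs`). The types below are simplified stand-ins:

```rust
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum Role {
    User,
    Assistant,
}

#[derive(Debug)]
struct RequestMessage {
    role: Role,
    content: String,
    cache: bool,
}

#[derive(Default)]
struct ThreadModel {
    messages: Vec<RequestMessage>,
    tool_use_limit_reached: bool,
}

impl ThreadModel {
    fn on_tool_use_limit_reached(&mut self) {
        self.tool_use_limit_reached = true;
    }

    fn resume(&mut self) {
        assert!(self.tool_use_limit_reached, "nothing to resume");
        // Mirrors Message::Resume in thread.rs.
        self.messages.push(RequestMessage {
            role: Role::User,
            content: "Continue where you left off".to_string(),
            cache: false,
        });
        self.tool_use_limit_reached = false;
    }
}

fn main() {
    let mut thread = ThreadModel::default();
    thread.messages.push(RequestMessage {
        role: Role::User,
        content: "abc".to_string(),
        cache: false,
    });
    thread.on_tool_use_limit_reached();
    thread.resume();
    let last = thread.messages.last().unwrap();
    assert_eq!(last.content, "Continue where you left off");
    assert_eq!(last.role, Role::User);
    assert!(!last.cache);
}
```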
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( @@ -1089,6 +1330,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { raw_input: json!({"text": "native"}).to_string(), input: json!({"text": "native"}), is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -1324,20 +1566,20 @@ async fn test_cancellation(cx: &mut TestAppContext) { ThreadEvent::ToolCall(tool_call) => { assert_eq!(tool_call.title, expected_tools.remove(0)); if tool_call.title == "Echo" { - echo_id = Some(tool_call.id); + echo_id = Some(tool_call.tool_call_id); } } ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( acp::ToolCallUpdate { - id, + tool_call_id, fields: acp::ToolCallUpdateFields { status: Some(acp::ToolCallStatus::Completed), .. }, - meta: None, + .. }, - )) if Some(&id) == echo_id.as_ref() => { + )) if Some(&tool_call_id) == echo_id.as_ref() => { echo_completed = true; } _ => {} @@ -1788,6 +2030,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { raw_input: "{}".into(), input: json!({}), is_input_complete: true, + thought_signature: None, }; let echo_tool_use = LanguageModelToolUse { id: "tool_id_2".into(), @@ -1795,6 +2038,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { raw_input: json!({"text": "test"}).to_string(), input: json!({"text": "test"}), is_input_complete: true, + thought_signature: None, }; fake_model.send_last_completion_stream_text_chunk("Hi!"); fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( @@ -1818,7 +2062,8 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["Hey!".into()], - cache: true + cache: true, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, @@ -1826,7 +2071,8 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { MessageContent::Text("Hi!".into()), MessageContent::ToolUse(echo_tool_use.clone()) ], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, @@ -1837,7 +2083,8 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { content: "test".into(), output: Some("test".into()) })], - cache: false + cache: false, + reasoning_details: None, }, ], ); @@ -1961,11 +2208,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) { .update(|cx| { connection.prompt( Some(acp_thread::UserMessageId::new()), - acp::PromptRequest { - session_id: session_id.clone(), - prompt: vec!["ghi".into()], - meta: None, - }, + acp::PromptRequest::new(session_id.clone(), vec!["ghi".into()]), cx, ) }) @@ -2000,6 +2243,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { raw_input: input.to_string(), input, is_input_complete: false, + thought_signature: None, }, )); @@ -2012,6 +2256,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { raw_input: input.to_string(), input, is_input_complete: true, + thought_signature: None, }, )); fake_model.end_last_completion_stream(); @@ -2020,68 +2265,50 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { let tool_call = expect_tool_call(&mut events).await; assert_eq!( tool_call, - acp::ToolCall { - id: acp::ToolCallId("1".into()), - title: "Thinking".into(), - kind: acp::ToolKind::Think, - status: acp::ToolCallStatus::Pending, - content: vec![], - locations: vec![], - raw_input: 
Some(json!({})), - raw_output: None, - meta: Some(json!({ "tool_name": "thinking" })), - } + acp::ToolCall::new("1", "Thinking") + .kind(acp::ToolKind::Think) + .raw_input(json!({})) + .meta(acp::Meta::from_iter([( + "tool_name".into(), + "thinking".into() + )])) ); let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, - acp::ToolCallUpdate { - id: acp::ToolCallId("1".into()), - fields: acp::ToolCallUpdateFields { - title: Some("Thinking".into()), - kind: Some(acp::ToolKind::Think), - raw_input: Some(json!({ "content": "Thinking hard!" })), - ..Default::default() - }, - meta: None, - } + acp::ToolCallUpdate::new( + "1", + acp::ToolCallUpdateFields::new() + .title("Thinking") + .kind(acp::ToolKind::Think) + .raw_input(json!({ "content": "Thinking hard!"})) + ) ); let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, - acp::ToolCallUpdate { - id: acp::ToolCallId("1".into()), - fields: acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::InProgress), - ..Default::default() - }, - meta: None, - } + acp::ToolCallUpdate::new( + "1", + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress) + ) ); let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, - acp::ToolCallUpdate { - id: acp::ToolCallId("1".into()), - fields: acp::ToolCallUpdateFields { - content: Some(vec!["Thinking hard!".into()]), - ..Default::default() - }, - meta: None, - } + acp::ToolCallUpdate::new( + "1", + acp::ToolCallUpdateFields::new().content(vec!["Thinking hard!".into()]) + ) ); let update = expect_tool_call_update_fields(&mut events).await; assert_eq!( update, - acp::ToolCallUpdate { - id: acp::ToolCallId("1".into()), - fields: acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::Completed), - raw_output: Some("Finished thinking.".into()), - ..Default::default() - }, - meta: None, - } + acp::ToolCallUpdate::new( + "1", + acp::ToolCallUpdateFields::new() + .status(acp::ToolCallStatus::Completed) + .raw_output("Finished thinking.") + ) ); } @@ -2214,6 +2441,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { raw_input: json!({"text": "test"}).to_string(), input: json!({"text": "test"}), is_input_complete: true, + thought_signature: None, }; fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( tool_use_1.clone(), @@ -2232,12 +2460,14 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { LanguageModelRequestMessage { role: Role::User, content: vec!["Call the echo tool!".into()], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::Assistant, content: vec![language_model::MessageContent::ToolUse(tool_use_1.clone())], - cache: false + cache: false, + reasoning_details: None, }, LanguageModelRequestMessage { role: Role::User, @@ -2250,7 +2480,8 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { output: Some("test".into()) } )], - cache: true + cache: true, + reasoning_details: None, }, ] ); @@ -2264,7 +2495,8 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { thread.last_message(), Some(Message::Agent(AgentMessage { content: vec![AgentMessageContent::Text("Done".into())], - tool_results: IndexMap::default() + tool_results: IndexMap::default(), + reasoning_details: None, })) ); }) @@ -2512,7 +2744,7 @@ fn setup_context_server( let mut settings = ProjectSettings::get_global(cx).clone(); 
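The assertions above, like several hunks below, move from exhaustive struct literals to the `new(...)`-plus-chained-setters style exposed by the updated `agent_client_protocol` types. A generic, std-only illustration of that migration using a home-grown type (not the real crate API):

```rust
// Illustrative only: field names echo the acp ToolCall type, but this is a
// stand-in showing the struct-literal -> builder-setter pattern.
#[derive(Debug, Default, PartialEq)]
struct ToolCallSketch {
    id: String,
    title: String,
    kind: Option<String>,
    status: Option<String>,
    raw_input: Option<String>,
}

impl ToolCallSketch {
    fn new(id: impl Into<String>, title: impl Into<String>) -> Self {
        Self {
            id: id.into(),
            title: title.into(),
            ..Default::default()
        }
    }

    // Each setter consumes and returns Self so calls can be chained, which is
    // what lets the tests above collapse large struct literals into one expression.
    fn kind(mut self, kind: impl Into<String>) -> Self {
        self.kind = Some(kind.into());
        self
    }

    fn status(mut self, status: impl Into<String>) -> Self {
        self.status = Some(status.into());
        self
    }

    fn raw_input(mut self, raw_input: impl Into<String>) -> Self {
        self.raw_input = Some(raw_input.into());
        self
    }
}

fn main() {
    // Before: ToolCallSketch { id: "1".into(), title: "Thinking".into(), kind: Some(...), .. }
    // After:
    let call = ToolCallSketch::new("1", "Thinking").kind("think").raw_input("{}");
    assert_eq!(call.kind.as_deref(), Some("think"));
    assert_eq!(call.status, None);
    println!("{call:?}");
}
```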
settings.context_servers.insert( name.into(), - project::project_settings::ContextServerSettings::Custom { + project::project_settings::ContextServerSettings::Stdio { enabled: true, command: ContextServerCommand { path: "somebinary".into(), @@ -2577,3 +2809,181 @@ fn setup_context_server( cx.run_until_parked(); mcp_tool_calls_rx } + +#[gpui::test] +async fn test_tokens_before_message(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + // First message + let message_1_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_1_id.clone(), ["First message"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Before any response, tokens_before_message should return None for first message + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.tokens_before_message(&message_1_id), + None, + "First message should have no tokens before it" + ); + }); + + // Complete first message with usage + fake_model.send_last_completion_stream_text_chunk("Response 1"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 100, + output_tokens: 50, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // First message still has no tokens before it + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.tokens_before_message(&message_1_id), + None, + "First message should still have no tokens before it after response" + ); + }); + + // Second message + let message_2_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_2_id.clone(), ["Second message"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Second message should have first message's input tokens before it + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.tokens_before_message(&message_2_id), + Some(100), + "Second message should have 100 tokens before it (from first request)" + ); + }); + + // Complete second message + fake_model.send_last_completion_stream_text_chunk("Response 2"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 250, // Total for this request (includes previous context) + output_tokens: 75, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // Third message + let message_3_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_3_id.clone(), ["Third message"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Third message should have second message's input tokens (250) before it + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.tokens_before_message(&message_3_id), + Some(250), + "Third message should have 250 tokens before it (from second request)" + ); + // Second message should still have 100 + assert_eq!( + thread.tokens_before_message(&message_2_id), + Some(100), + "Second message should still have 100 tokens before it" + ); + // First message still has none + assert_eq!( + thread.tokens_before_message(&message_1_id), + None, + "First message should still have no tokens before it" + ); + }); +} + +#[gpui::test] +async fn test_tokens_before_message_after_truncate(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + // Set up three messages with responses + let message_1_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_1_id.clone(), ["Message 1"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Response 1"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 100, + output_tokens: 50, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let message_2_id = UserMessageId::new(); + thread + .update(cx, |thread, cx| { + thread.send(message_2_id.clone(), ["Message 2"], cx) + }) + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Response 2"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 250, + output_tokens: 75, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // Verify initial state + thread.read_with(cx, |thread, _| { + assert_eq!(thread.tokens_before_message(&message_2_id), Some(100)); + }); + + // Truncate at message 2 (removes message 2 and everything after) + thread + .update(cx, |thread, cx| thread.truncate(message_2_id.clone(), cx)) + .unwrap(); + cx.run_until_parked(); + + // After truncation, message_2_id no longer exists, so lookup should return None + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.tokens_before_message(&message_2_id), + None, + "After truncation, message 2 no longer exists" + ); + // Message 1 still exists but has no tokens before it + assert_eq!( + thread.tokens_before_message(&message_1_id), + None, + "First message still has no tokens before it" + ); + }); +} diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 5cf230629c8e542a23ea7ffc5bdb0fa5a1c73a53..ef3ca23c3caf816a28e91e9e75b21f2cc80451e7 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2,7 +2,8 @@ use crate::{ ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, - SystemPromptTemplate, Template, Templates, TerminalTool, ThinkingTool, WebSearchTool, + RestoreFileFromDiskTool, SaveFileTool, SystemPromptTemplate, Template, Templates, TerminalTool, + ThinkingTool, WebSearchTool, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; @@ -15,7 +16,7 @@ use agent_settings::{ use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; use client::{ModelRequestUsage, RequestUsage, UserStore}; -use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit}; +use cloud_llm_client::{CompletionIntent, Plan, UsageLimit}; use collections::{HashMap, HashSet, IndexMap}; use fs::Fs; use futures::stream; @@ -107,12 +108,19 @@ impl Message { pub fn to_request(&self) -> Vec { match self { - Message::User(message) => vec![message.to_request()], + Message::User(message) => { + if message.content.is_empty() { + vec![] + } else { + vec![message.to_request()] + } + } Message::Agent(message) => message.to_request(), Message::Resume => vec![LanguageModelRequestMessage { role: 
Role::User, content: vec!["Continue where you left off".into()], cache: false, + reasoning_details: None, }], } } @@ -177,6 +185,7 @@ impl UserMessage { role: Role::User, content: Vec::with_capacity(self.content.len()), cache: false, + reasoning_details: None, }; const OPEN_CONTEXT: &str = "\n\ @@ -444,6 +453,7 @@ impl AgentMessage { role: Role::Assistant, content: Vec::with_capacity(self.content.len()), cache: false, + reasoning_details: self.reasoning_details.clone(), }; for chunk in &self.content { match chunk { @@ -479,6 +489,7 @@ impl AgentMessage { role: Role::User, content: Vec::new(), cache: false, + reasoning_details: None, }; for tool_result in self.tool_results.values() { @@ -508,6 +519,7 @@ impl AgentMessage { pub struct AgentMessage { pub content: Vec, pub tool_results: IndexMap, + pub reasoning_details: Option, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -525,6 +537,7 @@ pub trait TerminalHandle { fn id(&self, cx: &AsyncApp) -> Result; fn current_output(&self, cx: &AsyncApp) -> Result; fn wait_for_exit(&self, cx: &AsyncApp) -> Result>>; + fn kill(&self, cx: &AsyncApp) -> Result<()>; } pub trait ThreadEnvironment { @@ -607,17 +620,16 @@ pub struct Thread { pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, pub(crate) action_log: Entity, + /// Tracks the last time files were read by the agent, to detect external modifications + pub(crate) file_read_times: HashMap, } impl Thread { fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities { let image = model.map_or(true, |model| model.supports_images()); - acp::PromptCapabilities { - meta: None, - image, - audio: false, - embedded_context: true, - } + acp::PromptCapabilities::new() + .image(image) + .embedded_context(true) } pub fn new( @@ -633,7 +645,7 @@ impl Thread { let (prompt_capabilities_tx, prompt_capabilities_rx) = watch::channel(Self::prompt_capabilities(model.as_deref())); Self { - id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()), + id: acp::SessionId::new(uuid::Uuid::new_v4().to_string()), prompt_id: PromptId::new(), updated_at: Utc::now(), title: None, @@ -665,6 +677,7 @@ impl Thread { prompt_capabilities_rx, project, action_log, + file_read_times: HashMap::default(), } } @@ -729,17 +742,11 @@ impl Thread { let Some(tool) = tool else { stream .0 - .unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall { - meta: None, - id: acp::ToolCallId(tool_use.id.to_string().into()), - title: tool_use.name.to_string(), - kind: acp::ToolKind::Other, - status: acp::ToolCallStatus::Failed, - content: Vec::new(), - locations: Vec::new(), - raw_input: Some(tool_use.input.clone()), - raw_output: None, - }))) + .unbounded_send(Ok(ThreadEvent::ToolCall( + acp::ToolCall::new(tool_use.id.to_string(), tool_use.name.to_string()) + .status(acp::ToolCallStatus::Failed) + .raw_input(tool_use.input.clone()), + ))) .ok(); return; }; @@ -769,8 +776,8 @@ impl Thread { stream.update_tool_call_fields( &tool_use.id, - acp::ToolCallUpdateFields { - status: Some( + acp::ToolCallUpdateFields::new() + .status( tool_result .as_ref() .map_or(acp::ToolCallStatus::Failed, |result| { @@ -780,10 +787,8 @@ impl Thread { acp::ToolCallStatus::Completed } }), - ), - raw_output: output, - ..Default::default() - }, + ) + .raw_output(output), ); } @@ -860,6 +865,7 @@ impl Thread { updated_at: db_thread.updated_at, prompt_capabilities_tx, prompt_capabilities_rx, + file_read_times: HashMap::default(), } } @@ -999,9 +1005,12 @@ impl Thread { self.add_tool(NowTool); 
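A std-only sketch of the lookup that the new `Thread::tokens_before_message` (added just below) performs, and that the two `test_tokens_before_message*` tests above exercise: the tokens "before" a user message are the input tokens recorded for the previous user message's request, or `None` for the first message or an unknown id. Types are simplified stand-ins:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct UserMessageId(u64);

enum Message {
    User(UserMessageId),
    Agent,
}

fn tokens_before_message(
    messages: &[Message],
    request_token_usage: &HashMap<UserMessageId, u64>,
    target_id: UserMessageId,
) -> Option<u64> {
    let mut previous_user_message_id = None;
    for message in messages {
        if let Message::User(id) = message {
            if *id == target_id {
                // No previous user message, or no usage recorded yet -> None.
                let prev_id = previous_user_message_id?;
                return request_token_usage.get(&prev_id).copied();
            }
            previous_user_message_id = Some(*id);
        }
    }
    None
}

fn main() {
    let (m1, m2, m3) = (UserMessageId(1), UserMessageId(2), UserMessageId(3));
    let messages = vec![
        Message::User(m1),
        Message::Agent,
        Message::User(m2),
        Message::Agent,
        Message::User(m3),
    ];
    // Usage is recorded per request, keyed by the user message that started it.
    let usage = HashMap::from([(m1, 100), (m2, 250)]);
    assert_eq!(tokens_before_message(&messages, &usage, m1), None);
    assert_eq!(tokens_before_message(&messages, &usage, m2), Some(100));
    assert_eq!(tokens_before_message(&messages, &usage, m3), Some(250));
}
```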
self.add_tool(OpenTool::new(self.project.clone())); self.add_tool(ReadFileTool::new( + cx.weak_entity(), self.project.clone(), self.action_log.clone(), )); + self.add_tool(SaveFileTool::new(self.project.clone())); + self.add_tool(RestoreFileFromDiskTool::new(self.project.clone())); self.add_tool(TerminalTool::new(self.project.clone(), environment)); self.add_tool(ThinkingTool); self.add_tool(WebSearchTool); @@ -1086,6 +1095,28 @@ impl Thread { }) } + /// Get the total input token count as of the message before the given message. + /// + /// Returns `None` if: + /// - `target_id` is the first message (no previous message) + /// - The previous message hasn't received a response yet (no usage data) + /// - `target_id` is not found in the messages + pub fn tokens_before_message(&self, target_id: &UserMessageId) -> Option { + let mut previous_user_message_id: Option<&UserMessageId> = None; + + for message in &self.messages { + if let Message::User(user_msg) = message { + if &user_msg.id == target_id { + let prev_id = previous_user_message_id?; + let usage = self.request_token_usage.get(prev_id)?; + return Some(usage.input_tokens); + } + previous_user_message_id = Some(&user_msg.id); + } + } + None + } + /// Look up the active profile and resolve its preferred model if one is configured. fn resolve_profile_model( profile_id: &AgentProfileId, @@ -1138,20 +1169,64 @@ impl Thread { where T: Into, { + let content = content.into_iter().map(Into::into).collect::>(); + log::debug!("Thread::send content: {:?}", content); + + self.messages + .push(Message::User(UserMessage { id, content })); + cx.notify(); + + self.send_existing(cx) + } + + pub fn send_existing( + &mut self, + cx: &mut Context, + ) -> Result>> { let model = self.model().context("No language model configured")?; log::info!("Thread::send called with model: {}", model.name().0); self.advance_prompt_id(); - let content = content.into_iter().map(Into::into).collect::>(); - log::debug!("Thread::send content: {:?}", content); + log::debug!("Total messages in thread: {}", self.messages.len()); + self.run_turn(cx) + } + pub fn push_acp_user_block( + &mut self, + id: UserMessageId, + blocks: impl IntoIterator, + path_style: PathStyle, + cx: &mut Context, + ) { + let content = blocks + .into_iter() + .map(|block| UserMessageContent::from_content_block(block, path_style)) + .collect::>(); self.messages .push(Message::User(UserMessage { id, content })); cx.notify(); + } - log::debug!("Total messages in thread: {}", self.messages.len()); - self.run_turn(cx) + pub fn push_acp_agent_block(&mut self, block: acp::ContentBlock, cx: &mut Context) { + let text = match block { + acp::ContentBlock::Text(text_content) => text_content.text, + acp::ContentBlock::Image(_) => "[image]".to_string(), + acp::ContentBlock::Audio(_) => "[audio]".to_string(), + acp::ContentBlock::ResourceLink(resource_link) => resource_link.uri, + acp::ContentBlock::Resource(resource) => match resource.resource { + acp::EmbeddedResourceResource::TextResourceContents(resource) => resource.uri, + acp::EmbeddedResourceResource::BlobResourceContents(resource) => resource.uri, + _ => "[resource]".to_string(), + }, + _ => "[unknown]".to_string(), + }; + + self.messages.push(Message::Agent(AgentMessage { + content: vec![AgentMessageContent::Text(text)], + ..Default::default() + })); + cx.notify(); } #[cfg(feature = "eval")] @@ -1264,15 +1339,13 @@ impl Thread { event_stream.update_tool_call_fields( &tool_result.tool_use_id, - acp::ToolCallUpdateFields { - status: Some(if tool_result.is_error { 
+ acp::ToolCallUpdateFields::new() + .status(if tool_result.is_error { acp::ToolCallStatus::Failed } else { acp::ToolCallStatus::Completed - }), - raw_output: tool_result.output.clone(), - ..Default::default() - }, + }) + .raw_output(tool_result.output.clone()), ); this.update(cx, |this, _cx| { this.pending_message() @@ -1393,6 +1466,18 @@ impl Thread { self.handle_thinking_event(text, signature, event_stream, cx) } RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx), + ReasoningDetails(details) => { + let last_message = self.pending_message(); + // Store the last non-empty reasoning_details (overwrites earlier ones) + // This ensures we keep the encrypted reasoning with signatures, not the early text reasoning + if let serde_json::Value::Array(ref arr) = details { + if !arr.is_empty() { + last_message.reasoning_details = Some(details); + } + } else { + last_message.reasoning_details = Some(details); + } + } ToolUse(tool_use) => { return Ok(self.handle_tool_use_event(tool_use, event_stream, cx)); } @@ -1425,20 +1510,16 @@ impl Thread { ); self.update_token_usage(usage, cx); } - StatusUpdate(CompletionRequestStatus::UsageUpdated { amount, limit }) => { + UsageUpdated { amount, limit } => { self.update_model_request_usage(amount, limit, cx); } - StatusUpdate( - CompletionRequestStatus::Started - | CompletionRequestStatus::Queued { .. } - | CompletionRequestStatus::Failed { .. }, - ) => {} - StatusUpdate(CompletionRequestStatus::ToolUseLimitReached) => { + ToolUseLimitReached => { self.tool_use_limit_reached = true; } Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()), Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()), Stop(StopReason::ToolUse | StopReason::EndTurn) => {} + Started | Queued { .. 
} => {} } Ok(None) @@ -1542,12 +1623,10 @@ impl Thread { } else { event_stream.update_tool_call_fields( &tool_use.id, - acp::ToolCallUpdateFields { - title: Some(title.into()), - kind: Some(kind), - raw_input: Some(tool_use.input.clone()), - ..Default::default() - }, + acp::ToolCallUpdateFields::new() + .title(title.as_str()) + .kind(kind) + .raw_input(tool_use.input.clone()), ); } @@ -1569,10 +1648,9 @@ impl Thread { let fs = self.project.read(cx).fs().clone(); let tool_event_stream = ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs)); - tool_event_stream.update_fields(acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::InProgress), - ..Default::default() - }); + tool_event_stream.update_fields( + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress), + ); let supports_images = self.model().is_some_and(|model| model.supports_images()); let tool_result = tool.run(tool_use.input, tool_event_stream, cx); log::debug!("Running tool {}", tool_use.name); @@ -1647,6 +1725,10 @@ impl Thread { self.pending_summary_generation.is_some() } + pub fn is_generating_title(&self) -> bool { + self.pending_title_generation.is_some() + } + pub fn summary(&mut self, cx: &mut Context) -> Shared>> { if let Some(summary) = self.summary.as_ref() { return Task::ready(Some(summary.clone())).shared(); @@ -1672,6 +1754,7 @@ impl Thread { role: Role::User, content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()], cache: false, + reasoning_details: None, }); let task = cx @@ -1682,9 +1765,7 @@ impl Thread { let event = event.log_err()?; let text = match event { LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { + LanguageModelCompletionEvent::UsageUpdated { amount, limit } => { this.update(cx, |thread, cx| { thread.update_model_request_usage(amount, limit, cx); }) @@ -1715,7 +1796,7 @@ impl Thread { task } - fn generate_title(&mut self, cx: &mut Context) { + pub fn generate_title(&mut self, cx: &mut Context) { let Some(model) = self.summarization_model.clone() else { return; }; @@ -1738,6 +1819,7 @@ impl Thread { role: Role::User, content: vec![SUMMARIZE_THREAD_PROMPT.into()], cache: false, + reasoning_details: None, }); self.pending_title_generation = Some(cx.spawn(async move |this, cx| { let mut title = String::new(); @@ -1748,9 +1830,7 @@ impl Thread { let event = event?; let text = match event { LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { + LanguageModelCompletionEvent::UsageUpdated { amount, limit } => { this.update(cx, |thread, cx| { thread.update_model_request_usage(amount, limit, cx); })?; @@ -1965,6 +2045,12 @@ impl Thread { self.running_turn.as_ref()?.tools.get(name).cloned() } + pub fn has_tool(&self, name: &str) -> bool { + self.running_turn + .as_ref() + .is_some_and(|turn| turn.tools.contains_key(name)) + } + fn build_request_messages( &self, available_tools: Vec, @@ -1987,6 +2073,7 @@ impl Thread { role: Role::System, content: vec![system_prompt.into()], cache: false, + reasoning_details: None, }]; for message in &self.messages { messages.extend(message.to_request()); @@ -2364,19 +2451,13 @@ impl ThreadEventStream { kind: acp::ToolKind, input: serde_json::Value, ) -> acp::ToolCall { - acp::ToolCall { - meta: Some(serde_json::json!({ - "tool_name": tool_name - })), - id: acp::ToolCallId(id.to_string().into()), - title, - 
kind, - status: acp::ToolCallStatus::Pending, - content: vec![], - locations: vec![], - raw_input: Some(input), - raw_output: None, - } + acp::ToolCall::new(id.to_string(), title) + .kind(kind) + .raw_input(input) + .meta(acp::Meta::from_iter([( + "tool_name".into(), + tool_name.into(), + )])) } fn update_tool_call_fields( @@ -2386,12 +2467,7 @@ impl ThreadEventStream { ) { self.0 .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( - acp::ToolCallUpdate { - meta: None, - id: acp::ToolCallId(tool_use_id.to_string().into()), - fields, - } - .into(), + acp::ToolCallUpdate::new(tool_use_id.to_string(), fields).into(), ))) .ok(); } @@ -2454,7 +2530,7 @@ impl ToolCallEventStream { .0 .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( acp_thread::ToolCallUpdateDiff { - id: acp::ToolCallId(self.tool_use_id.to_string().into()), + id: acp::ToolCallId::new(self.tool_use_id.to_string()), diff, } .into(), @@ -2472,33 +2548,26 @@ impl ToolCallEventStream { .0 .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization( ToolCallAuthorization { - tool_call: acp::ToolCallUpdate { - meta: None, - id: acp::ToolCallId(self.tool_use_id.to_string().into()), - fields: acp::ToolCallUpdateFields { - title: Some(title.into()), - ..Default::default() - }, - }, + tool_call: acp::ToolCallUpdate::new( + self.tool_use_id.to_string(), + acp::ToolCallUpdateFields::new().title(title.into()), + ), options: vec![ - acp::PermissionOption { - id: acp::PermissionOptionId("always_allow".into()), - name: "Always Allow".into(), - kind: acp::PermissionOptionKind::AllowAlways, - meta: None, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("allow".into()), - name: "Allow".into(), - kind: acp::PermissionOptionKind::AllowOnce, - meta: None, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("deny".into()), - name: "Deny".into(), - kind: acp::PermissionOptionKind::RejectOnce, - meta: None, - }, + acp::PermissionOption::new( + acp::PermissionOptionId::new("always_allow"), + "Always Allow", + acp::PermissionOptionKind::AllowAlways, + ), + acp::PermissionOption::new( + acp::PermissionOptionId::new("allow"), + "Allow", + acp::PermissionOptionKind::AllowOnce, + ), + acp::PermissionOption::new( + acp::PermissionOptionId::new("deny"), + "Deny", + acp::PermissionOptionKind::RejectOnce, + ), ], response: response_tx, }, @@ -2643,7 +2712,15 @@ impl UserMessageContent { // TODO Self::Text("[blob]".to_string()) } + other => { + log::warn!("Unexpected content type: {:?}", other); + Self::Text("[unknown]".to_string()) + } }, + other => { + log::warn!("Unexpected content type: {:?}", other); + Self::Text("[unknown]".to_string()) + } } } } @@ -2651,32 +2728,15 @@ impl UserMessageContent { impl From for acp::ContentBlock { fn from(content: UserMessageContent) -> Self { match content { - UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent { - data: image.source.to_string(), - mime_type: "image/png".to_string(), - meta: None, - annotations: None, - uri: None, - }), - UserMessageContent::Mention { uri, content } => { - acp::ContentBlock::Resource(acp::EmbeddedResource { - meta: None, - resource: acp::EmbeddedResourceResource::TextResourceContents( - acp::TextResourceContents { - meta: None, - mime_type: None, - text: content, - uri: uri.to_uri().to_string(), - }, - ), - annotations: None, - }) + UserMessageContent::Text(text) => text.into(), + UserMessageContent::Image(image) => { + 
acp::ContentBlock::Image(acp::ImageContent::new(image.source, "image/png")) } + UserMessageContent::Mention { uri, content } => acp::ContentBlock::Resource( + acp::EmbeddedResource::new(acp::EmbeddedResourceResource::TextResourceContents( + acp::TextResourceContents::new(content, uri.to_uri().to_string()), + )), + ), } } } @@ -2684,7 +2744,6 @@ impl From for acp::ContentBlock { fn convert_image(image_content: acp::ImageContent) -> LanguageModelImage { LanguageModelImage { source: image_content.data.into(), - // TODO: make this optional? - size: gpui::Size::new(0.into(), 0.into()), + size: None, } } diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index 1d3c0d557716ec3a52f910971547df4ee764cab0..358903a32baa5ead9b073642015e6829501307a2 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -12,6 +12,9 @@ mod move_path_tool; mod now_tool; mod open_tool; mod read_file_tool; +mod restore_file_from_disk_tool; +mod save_file_tool; + mod terminal_tool; mod thinking_tool; mod web_search_tool; @@ -33,6 +36,9 @@ pub use move_path_tool::*; pub use now_tool::*; pub use open_tool::*; pub use read_file_tool::*; +pub use restore_file_from_disk_tool::*; +pub use save_file_tool::*; + pub use terminal_tool::*; pub use thinking_tool::*; pub use web_search_tool::*; @@ -88,6 +94,8 @@ tools! { NowTool, OpenTool, ReadFileTool, + RestoreFileFromDiskTool, + SaveFileTool, TerminalTool, ThinkingTool, WebSearchTool, diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 03a0ef84e73d4cbca83d61077d568ec58cd7ae2b..3b01b2feb7dd36615a8ba7c63d81a81694e0d268 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -2,12 +2,24 @@ use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream}; use agent_client_protocol::ToolKind; use anyhow::{Result, anyhow, bail}; use collections::{BTreeMap, HashMap}; -use context_server::ContextServerId; -use gpui::{App, Context, Entity, SharedString, Task}; +use context_server::{ContextServerId, client::NotificationSubscription}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task}; use project::context_server_store::{ContextServerStatus, ContextServerStore}; use std::sync::Arc; use util::ResultExt; +pub struct ContextServerPrompt { + pub server_id: ContextServerId, + pub prompt: context_server::types::Prompt, +} + +pub enum ContextServerRegistryEvent { + ToolsChanged, + PromptsChanged, +} + +impl EventEmitter for ContextServerRegistry {} + pub struct ContextServerRegistry { server_store: Entity, registered_servers: HashMap, @@ -16,7 +28,10 @@ pub struct ContextServerRegistry { struct RegisteredContextServer { tools: BTreeMap>, + prompts: BTreeMap, load_tools: Task>, + load_prompts: Task>, + _tools_updated_subscription: Option, } impl ContextServerRegistry { @@ -28,6 +43,7 @@ impl ContextServerRegistry { }; for server in server_store.read(cx).running_servers() { this.reload_tools_for_server(server.id(), cx); + this.reload_prompts_for_server(server.id(), cx); } this } @@ -56,6 +72,88 @@ impl ContextServerRegistry { .map(|(id, server)| (id, &server.tools)) } + pub fn prompts(&self) -> impl Iterator { + self.registered_servers + .values() + .flat_map(|server| server.prompts.values()) + } + + pub fn find_prompt( + &self, + server_id: Option<&ContextServerId>, + name: &str, + ) -> Option<&ContextServerPrompt> { + if let Some(server_id) = server_id { + self.registered_servers + .get(server_id) 
+ .and_then(|server| server.prompts.get(name)) + } else { + self.registered_servers + .values() + .find_map(|server| server.prompts.get(name)) + } + } + + pub fn server_store(&self) -> &Entity { + &self.server_store + } + + fn get_or_register_server( + &mut self, + server_id: &ContextServerId, + cx: &mut Context, + ) -> &mut RegisteredContextServer { + self.registered_servers + .entry(server_id.clone()) + .or_insert_with(|| Self::init_registered_server(server_id, &self.server_store, cx)) + } + + fn init_registered_server( + server_id: &ContextServerId, + server_store: &Entity, + cx: &mut Context, + ) -> RegisteredContextServer { + let tools_updated_subscription = server_store + .read(cx) + .get_running_server(server_id) + .and_then(|server| { + let client = server.client()?; + + if !client.capable(context_server::protocol::ServerCapability::Tools) { + return None; + } + + let server_id = server.id(); + let this = cx.entity().downgrade(); + + Some(client.on_notification( + "notifications/tools/list_changed", + Box::new(move |_params, cx: AsyncApp| { + let server_id = server_id.clone(); + let this = this.clone(); + cx.spawn(async move |cx| { + this.update(cx, |this, cx| { + log::info!( + "Received tools/list_changed notification for server {}", + server_id + ); + this.reload_tools_for_server(server_id, cx); + }) + }) + .detach(); + }), + )) + }); + + RegisteredContextServer { + tools: BTreeMap::default(), + prompts: BTreeMap::default(), + load_tools: Task::ready(Ok(())), + load_prompts: Task::ready(Ok(())), + _tools_updated_subscription: tools_updated_subscription, + } + } + fn reload_tools_for_server(&mut self, server_id: ContextServerId, cx: &mut Context) { let Some(server) = self.server_store.read(cx).get_running_server(&server_id) else { return; @@ -63,17 +161,12 @@ impl ContextServerRegistry { let Some(client) = server.client() else { return; }; + if !client.capable(context_server::protocol::ServerCapability::Tools) { return; } - let registered_server = - self.registered_servers - .entry(server_id.clone()) - .or_insert(RegisteredContextServer { - tools: BTreeMap::default(), - load_tools: Task::ready(Ok(())), - }); + let registered_server = self.get_or_register_server(&server_id, cx); registered_server.load_tools = cx.spawn(async move |this, cx| { let response = client .request::(()) @@ -94,6 +187,49 @@ impl ContextServerRegistry { )); registered_server.tools.insert(tool.name(), tool); } + cx.emit(ContextServerRegistryEvent::ToolsChanged); + cx.notify(); + } + }) + }); + } + + fn reload_prompts_for_server(&mut self, server_id: ContextServerId, cx: &mut Context) { + let Some(server) = self.server_store.read(cx).get_running_server(&server_id) else { + return; + }; + let Some(client) = server.client() else { + return; + }; + if !client.capable(context_server::protocol::ServerCapability::Prompts) { + return; + } + + let registered_server = self.get_or_register_server(&server_id, cx); + + registered_server.load_prompts = cx.spawn(async move |this, cx| { + let response = client + .request::(()) + .await; + + this.update(cx, |this, cx| { + let Some(registered_server) = this.registered_servers.get_mut(&server_id) else { + return; + }; + + registered_server.prompts.clear(); + if let Some(response) = response.log_err() { + for prompt in response.prompts { + let name: SharedString = prompt.name.clone().into(); + registered_server.prompts.insert( + name, + ContextServerPrompt { + server_id: server_id.clone(), + prompt, + }, + ); + } + cx.emit(ContextServerRegistryEvent::PromptsChanged); cx.notify(); 
} }) @@ -112,9 +248,17 @@ impl ContextServerRegistry { ContextServerStatus::Starting => {} ContextServerStatus::Running => { self.reload_tools_for_server(server_id.clone(), cx); + self.reload_prompts_for_server(server_id.clone(), cx); } ContextServerStatus::Stopped | ContextServerStatus::Error(_) => { - self.registered_servers.remove(server_id); + if let Some(registered_server) = self.registered_servers.remove(server_id) { + if !registered_server.tools.is_empty() { + cx.emit(ContextServerRegistryEvent::ToolsChanged); + } + if !registered_server.prompts.is_empty() { + cx.emit(ContextServerRegistryEvent::PromptsChanged); + } + } cx.notify(); } } @@ -251,3 +395,39 @@ impl AnyAgentTool for ContextServerTool { Ok(()) } } + +pub fn get_prompt( + server_store: &Entity, + server_id: &ContextServerId, + prompt_name: &str, + arguments: HashMap, + cx: &mut AsyncApp, +) -> Task> { + let server = match cx.update(|cx| server_store.read(cx).get_running_server(server_id)) { + Ok(server) => server, + Err(error) => return Task::ready(Err(error)), + }; + let Some(server) = server else { + return Task::ready(Err(anyhow::anyhow!("Context server not found"))); + }; + + let Some(protocol) = server.client() else { + return Task::ready(Err(anyhow::anyhow!("Context server not initialized"))); + }; + + let prompt_name = prompt_name.to_string(); + + cx.background_spawn(async move { + let response = protocol + .request::( + context_server::types::PromptsGetParams { + name: prompt_name, + arguments: (!arguments.is_empty()).then(|| arguments), + meta: None, + }, + ) + .await?; + + Ok(response) + }) +} diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index a507044ce51dce5e55c53106c11d8a9b2c2a3d28..3acb7f5951f3ca4b682dcabc62a0d54c35ab08d6 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -273,14 +273,9 @@ impl AgentTool for EditFileTool { }; let abs_path = project.read(cx).absolute_path(&project_path, cx); if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields(ToolCallUpdateFields { - locations: Some(vec![acp::ToolCallLocation { - path: abs_path, - line: None, - meta: None, - }]), - ..Default::default() - }); + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]), + ); } let authorize = self.authorize(&input, &event_stream, cx); @@ -309,6 +304,59 @@ impl AgentTool for EditFileTool { })? .await?; + // Check if the file has been modified since the agent last read it + if let Some(abs_path) = abs_path.as_ref() { + let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| { + let last_read = thread.file_read_times.get(abs_path).copied(); + let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime()); + let dirty = buffer.read(cx).is_dirty(); + let has_save = thread.has_tool("save_file"); + let has_restore = thread.has_tool("restore_file_from_disk"); + (last_read, current, dirty, has_save, has_restore) + })?; + + // Check for unsaved changes first - these indicate modifications we don't know about + if is_dirty { + let message = match (has_save_tool, has_restore_tool) { + (true, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. 
\ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (true, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." + } + (false, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (false, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ + then ask them to save or revert the file manually and inform you when it's ok to proceed." + } + }; + anyhow::bail!("{}", message); + } + + // Check if the file was modified on disk since we last read it + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { + // MTime can be unreliable for comparisons, so our newtype intentionally + // doesn't support comparing them. If the mtime at all different + // (which could be because of a modification or because e.g. system clock changed), + // we pessimistically assume it was modified. + if current != last_read { + anyhow::bail!( + "The file {} has been modified since you last read it. \ + Please read the file again to get the current state before editing it.", + input.path.display() + ); + } + } + } + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?; event_stream.update_diff(diff.clone()); let _finalize_diff = util::defer({ @@ -355,10 +403,7 @@ impl AgentTool for EditFileTool { range.start.to_point(&buffer.snapshot()).row }).ok(); if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields(ToolCallUpdateFields { - locations: Some(vec![ToolCallLocation { path: abs_path, line, meta: None }]), - ..Default::default() - }); + event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path).line(line)])); } emitted_location = true; } @@ -421,6 +466,17 @@ impl AgentTool for EditFileTool { log.buffer_edited(buffer.clone(), cx); })?; + // Update the recorded read time after a successful edit so consecutive edits work + if let Some(abs_path) = abs_path.as_ref() { + if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { + buffer.file().and_then(|file| file.disk_state().mtime()) + })? 
{ + self.thread.update(cx, |thread, _| { + thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); + })?; + } + } + let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; let (new_text, unified_diff) = cx .background_spawn({ @@ -1748,10 +1804,438 @@ mod tests { } } + #[gpui::test] + async fn test_file_read_times_tracking(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "test.txt": "original content" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + // Initially, file_read_times should be empty + let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty()); + assert!(is_empty, "file_read_times should start empty"); + + // Create read tool + let read_tool = Arc::new(crate::ReadFileTool::new( + thread.downgrade(), + project.clone(), + action_log, + )); + + // Read the file to record the read time + cx.update(|cx| { + read_tool.clone().run( + crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Verify that file_read_times now contains an entry for the file + let has_entry = thread.read_with(cx, |thread, _| { + thread.file_read_times.len() == 1 + && thread + .file_read_times + .keys() + .any(|path| path.ends_with("test.txt")) + }); + assert!( + has_entry, + "file_read_times should contain an entry after reading the file" + ); + + // Read the file again - should update the entry + cx.update(|cx| { + read_tool.clone().run( + crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Should still have exactly one entry + let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1); + assert!( + has_one_entry, + "file_read_times should still have one entry after re-reading" + ); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); }); } + + #[gpui::test] + async fn test_consecutive_edits_work(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "test.txt": "original content" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let languages = project.read_with(cx, |project, _| project.languages().clone()); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let read_tool = 
Arc::new(crate::ReadFileTool::new( + thread.downgrade(), + project.clone(), + action_log, + )); + let edit_tool = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + languages, + Templates::new(), + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // First edit should work + let edit_result = { + let edit_task = cx.update(|cx| { + edit_tool.clone().run( + EditFileToolInput { + display_description: "First edit".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + }, + ToolCallEventStream::test().0, + cx, + ) + }); + + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk( + "original contentmodified content" + .to_string(), + ); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!( + edit_result.is_ok(), + "First edit should succeed, got error: {:?}", + edit_result.as_ref().err() + ); + + // Second edit should also work because the edit updated the recorded read time + let edit_result = { + let edit_task = cx.update(|cx| { + edit_tool.clone().run( + EditFileToolInput { + display_description: "Second edit".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + }, + ToolCallEventStream::test().0, + cx, + ) + }); + + cx.executor().run_until_parked(); + model.send_last_completion_stream_text_chunk( + "modified contentfurther modified content".to_string(), + ); + model.end_last_completion_stream(); + + edit_task.await + }; + assert!( + edit_result.is_ok(), + "Second consecutive edit should succeed, got error: {:?}", + edit_result.as_ref().err() + ); + } + + #[gpui::test] + async fn test_external_modification_detected(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "test.txt": "original content" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let languages = project.read_with(cx, |project, _| project.languages().clone()); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let read_tool = Arc::new(crate::ReadFileTool::new( + thread.downgrade(), + project.clone(), + action_log, + )); + let edit_tool = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + languages, + Templates::new(), + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Simulate external modification - advance time and save file + cx.background_executor + .advance_clock(std::time::Duration::from_secs(2)); + fs.save( + path!("/root/test.txt").as_ref(), + &"externally modified content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + // Reload the buffer to pick up the new mtime + let project_path = project + .read_with(cx, |project, cx| { + 
project.find_project_path("root/test.txt", cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + buffer + .update(cx, |buffer, cx| buffer.reload(cx)) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + // Try to edit - should fail because file was modified externally + let result = cx + .update(|cx| { + edit_tool.clone().run( + EditFileToolInput { + display_description: "Edit after external change".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + assert!( + result.is_err(), + "Edit should fail after external modification" + ); + let error_msg = result.unwrap_err().to_string(); + assert!( + error_msg.contains("has been modified since you last read it"), + "Error should mention file modification, got: {}", + error_msg + ); + } + + #[gpui::test] + async fn test_dirty_buffer_detected(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "test.txt": "original content" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model.clone()), + cx, + ) + }); + let languages = project.read_with(cx, |project, _| project.languages().clone()); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let read_tool = Arc::new(crate::ReadFileTool::new( + thread.downgrade(), + project.clone(), + action_log, + )); + let edit_tool = Arc::new(EditFileTool::new( + project.clone(), + thread.downgrade(), + languages, + Templates::new(), + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Open the buffer and make it dirty by editing without saving + let project_path = project + .read_with(cx, |project, cx| { + project.find_project_path("root/test.txt", cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + + // Make an in-memory edit to the buffer (making it dirty) + buffer.update(cx, |buffer, cx| { + let end_point = buffer.max_point(); + buffer.edit([(end_point..end_point, " added text")], None, cx); + }); + + // Verify buffer is dirty + let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); + assert!(is_dirty, "Buffer should be dirty after in-memory edit"); + + // Try to edit - should fail because buffer has unsaved changes + let result = cx + .update(|cx| { + edit_tool.clone().run( + EditFileToolInput { + display_description: "Edit with dirty buffer".into(), + path: "root/test.txt".into(), + mode: EditFileMode::Edit, + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + assert!(result.is_err(), "Edit should fail when buffer is dirty"); + let error_msg = result.unwrap_err().to_string(); + assert!( + error_msg.contains("This file has unsaved changes."), + "Error should mention unsaved changes, 
got: {}", + error_msg + ); + assert!( + error_msg.contains("keep or discard"), + "Error should ask whether to keep or discard changes, got: {}", + error_msg + ); + // Since save_file and restore_file_from_disk tools aren't added to the thread, + // the error message should ask the user to manually save or revert + assert!( + error_msg.contains("save or revert the file manually"), + "Error should ask user to manually save or revert when tools aren't available, got: {}", + error_msg + ); + } } diff --git a/crates/agent/src/tools/find_path_tool.rs b/crates/agent/src/tools/find_path_tool.rs index 41954ee31b2a4529e75541e78eff278a521307da..2a33b14b4c87d87154e2aa1ee25363b397189f89 100644 --- a/crates/agent/src/tools/find_path_tool.rs +++ b/crates/agent/src/tools/find_path_tool.rs @@ -118,33 +118,29 @@ impl AgentTool for FindPathTool { let paginated_matches: &[PathBuf] = &matches[cmp::min(input.offset, matches.len()) ..cmp::min(input.offset + RESULTS_PER_PAGE, matches.len())]; - event_stream.update_fields(acp::ToolCallUpdateFields { - title: Some(if paginated_matches.is_empty() { - "No matches".into() - } else if paginated_matches.len() == 1 { - "1 match".into() - } else { - format!("{} matches", paginated_matches.len()) - }), - content: Some( - paginated_matches - .iter() - .map(|path| acp::ToolCallContent::Content { - content: acp::ContentBlock::ResourceLink(acp::ResourceLink { - uri: format!("file://{}", path.display()), - name: path.to_string_lossy().into(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }), - }) - .collect(), - ), - ..Default::default() - }); + event_stream.update_fields( + acp::ToolCallUpdateFields::new() + .title(if paginated_matches.is_empty() { + "No matches".into() + } else if paginated_matches.len() == 1 { + "1 match".into() + } else { + format!("{} matches", paginated_matches.len()) + }) + .content( + paginated_matches + .iter() + .map(|path| { + acp::ToolCallContent::Content(acp::Content::new( + acp::ContentBlock::ResourceLink(acp::ResourceLink::new( + path.to_string_lossy(), + format!("file://{}", path.display()), + )), + )) + }) + .collect::>(), + ), + ); Ok(FindPathToolOutput { offset: input.offset, @@ -177,7 +173,7 @@ fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task + /// If the project has the following root directories: + /// + /// - /a/b/backend + /// - /c/d/frontend + /// + /// Use "backend/**/*.rs" to search only Rust files in the backend root directory. + /// Use "frontend/src/**/*.ts" to search TypeScript files only in the frontend root directory (sub-directory "src"). + /// Use "**/*.rs" to search Rust files across all root directories. + /// pub include_pattern: Option, /// Optional starting position for paginated results (0-based). /// When not provided, starts from the beginning. 
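// A minimal sketch of the glob semantics described in the include_pattern doc
// comment above, which resolves patterns relative to the project's root
// directories (e.g. "backend/**/*.rs" selects Rust files only under the
// `backend` root). For illustration this uses the off-the-shelf `globset`
// crate; the tool itself matches through Zed's PathMatcher, so treat this as a
// stand-in for the matching behaviour, not the actual implementation.
use globset::Glob;

fn include_pattern_demo() -> Result<(), globset::Error> {
    let matcher = Glob::new("backend/**/*.rs")?.compile_matcher();
    // Rust files under the `backend` root directory match the pattern...
    assert!(matcher.is_match("backend/src/lib.rs"));
    // ...while files under a different root directory do not.
    assert!(!matcher.is_match("frontend/src/app.ts"));
    Ok(())
}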
@@ -132,8 +145,7 @@ impl AgentTool for GrepTool { let exclude_patterns = global_settings .file_scan_exclusions .sources() - .iter() - .chain(global_settings.private_files.sources().iter()); + .chain(global_settings.private_files.sources()); match PathMatcher::new(exclude_patterns, path_style) { Ok(matcher) => matcher, @@ -310,7 +322,6 @@ mod tests { use super::*; use gpui::{TestAppContext, UpdateGlobal}; - use language::{Language, LanguageConfig, LanguageMatcher}; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; @@ -552,7 +563,7 @@ mod tests { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _cx| { - project.languages().add(rust_lang().into()) + project.languages().add(language::rust_lang()) }); project @@ -781,22 +792,6 @@ mod tests { }); } - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../../languages/src/rust/outline.scm")) - .unwrap() - } - #[gpui::test] async fn test_grep_security_boundaries(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index 52f88aa4db03a2bc01b0fd10fe99f8bad04c24f1..acfd4a16746fc1f78fd388f5dacf3e360f070ab5 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -1,7 +1,7 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; -use gpui::{App, Entity, SharedString, Task}; +use gpui::{App, Entity, SharedString, Task, WeakEntity}; use indoc::formatdoc; use language::Point; use language_model::{LanguageModelImage, LanguageModelToolResultContent}; @@ -12,11 +12,14 @@ use settings::Settings; use std::sync::Arc; use util::markdown::MarkdownCodeBlock; -use crate::{AgentTool, ToolCallEventStream, outline}; +use crate::{AgentTool, Thread, ToolCallEventStream, outline}; /// Reads the content of the given file in the project. /// /// - Never attempt to read a path that hasn't been previously mentioned. +/// - For large files, this tool returns a file outline with symbol names and line numbers instead of the full content. +/// This outline IS a successful response - use the line numbers to read specific sections with start_line/end_line. +/// Do NOT retry reading the same file without line numbers if you receive an outline. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct ReadFileToolInput { /// The relative path of the file to read. 
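// A minimal sketch of the follow-up call that the outline guidance above asks
// for: instead of retrying the whole file, the model re-reads a specific range
// using the line numbers reported in the outline. Field names (path,
// start_line, end_line) come from ReadFileToolInput as shown in this file; the
// concrete path and line numbers here are hypothetical.
use serde_json::json;

fn follow_up_read_args() -> serde_json::Value {
    json!({
        "path": "root/src/parser.rs", // hypothetical file that returned an outline
        "start_line": 100,            // range taken from the outline entry
        "end_line": 150
    })
}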
@@ -42,13 +45,19 @@ pub struct ReadFileToolInput { } pub struct ReadFileTool { + thread: WeakEntity, project: Entity, action_log: Entity, } impl ReadFileTool { - pub fn new(project: Entity, action_log: Entity) -> Self { + pub fn new( + thread: WeakEntity, + project: Entity, + action_log: Entity, + ) -> Self { Self { + thread, project, action_log, } @@ -144,14 +153,10 @@ impl AgentTool for ReadFileTool { let file_path = input.path.clone(); - event_stream.update_fields(ToolCallUpdateFields { - locations: Some(vec![acp::ToolCallLocation { - path: abs_path.clone(), - line: input.start_line.map(|line| line.saturating_sub(1)), - meta: None, - }]), - ..Default::default() - }); + event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![ + acp::ToolCallLocation::new(&abs_path) + .line(input.start_line.map(|line| line.saturating_sub(1))), + ])); if image_store::is_image_file(&self.project, &project_path, cx) { return cx.spawn(async move |cx| { @@ -195,6 +200,17 @@ impl AgentTool for ReadFileTool { anyhow::bail!("{file_path} not found"); } + // Record the file read time and mtime + if let Some(mtime) = buffer.read_with(cx, |buffer, _| { + buffer.file().and_then(|file| file.disk_state().mtime()) + })? { + self.thread + .update(cx, |thread, _| { + thread.file_read_times.insert(abs_path.to_path_buf(), mtime); + }) + .ok(); + } + let mut anchor = None; // Check if specific line ranges are provided @@ -237,16 +253,15 @@ impl AgentTool for ReadFileTool { if buffer_content.is_outline { Ok(formatdoc! {" - This file was too big to read all at once. + SUCCESS: File outline retrieved. This file is too large to read all at once, so the outline below shows the file's structure with line numbers. - {} + IMPORTANT: Do NOT retry this call without line numbers - you will get the same outline. + Instead, use the line numbers below to read specific sections by calling this tool again with start_line and end_line parameters. - Using the line numbers in this outline, you can call this tool again - while specifying the start_line and end_line fields to see the - implementations of symbols in the outline. + {} - Alternatively, you can fall back to the `grep` tool (if available) - to search the file for specific content.", buffer_content.text + NEXT STEPS: To read a specific symbol's implementation, call read_file with the same path plus start_line and end_line from the outline above. 
+ For example, to read a function shown as [L100-150], use start_line: 100 and end_line: 150.", buffer_content.text } .into()) } else { @@ -258,7 +273,9 @@ impl AgentTool for ReadFileTool { project.set_agent_location( Some(AgentLocation { buffer: buffer.downgrade(), - position: anchor.unwrap_or(text::Anchor::MIN), + position: anchor.unwrap_or_else(|| { + text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + }), }), cx, ); @@ -268,12 +285,9 @@ impl AgentTool for ReadFileTool { text, } .to_string(); - event_stream.update_fields(ToolCallUpdateFields { - content: Some(vec![acp::ToolCallContent::Content { - content: markdown.into(), - }]), - ..Default::default() - }) + event_stream.update_fields(ToolCallUpdateFields::new().content(vec![ + acp::ToolCallContent::Content(acp::Content::new(markdown)), + ])); } })?; @@ -285,11 +299,14 @@ impl AgentTool for ReadFileTool { #[cfg(test)] mod test { use super::*; + use crate::{ContextServerRegistry, Templates, Thread}; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; - use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; + use language_model::fake_provider::FakeLanguageModel; use project::{FakeFs, Project}; + use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; + use std::sync::Arc; use util::path; #[gpui::test] @@ -300,7 +317,20 @@ mod test { fs.insert_tree(path!("/root"), json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); let (event_stream, _) = ToolCallEventStream::test(); let result = cx @@ -333,7 +363,20 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -361,9 +404,22 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(Arc::new(rust_lang())); + language_registry.add(language::rust_lang()); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + 
cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -378,7 +434,7 @@ mod test { let content = result.to_str().unwrap(); assert_eq!( - content.lines().skip(4).take(6).collect::>(), + content.lines().skip(7).take(6).collect::>(), vec![ "struct Test0 [L1-4]", " a [L2]", @@ -413,7 +469,7 @@ mod test { pretty_assertions::assert_eq!( content .lines() - .skip(4) + .skip(7) .take(expected_content.len()) .collect::>(), expected_content @@ -435,7 +491,20 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -463,7 +532,20 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); // start_line of 0 should be treated as 1 let result = cx @@ -512,49 +594,6 @@ mod test { }); } - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query( - r#" - (line_comment) @annotation - - (struct_item - "struct" @context - name: (_) @name) @item - (enum_item - "enum" @context - name: (_) @name) @item - (enum_variant - name: (_) @name) @item - (field_declaration - name: (_) @name) @item - (impl_item - "impl" @context - trait: (_)? @name - "for"? 
@context - type: (_) @name - body: (_ "{" (_)* "}")) @item - (function_item - "fn" @context - name: (_) @name) @item - (mod_item - "mod" @context - name: (_) @name) @item - "#, - ) - .unwrap() - } - #[gpui::test] async fn test_read_file_security(cx: &mut TestAppContext) { init_test(cx); @@ -607,7 +646,20 @@ mod test { let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project, action_log)); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); // Reading a file outside the project worktree should fail let result = cx @@ -821,7 +873,24 @@ mod test { .await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let tool = Arc::new(ReadFileTool::new( + thread.downgrade(), + project.clone(), + action_log.clone(), + )); // Test reading allowed files in worktree1 let result = cx diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..f5723f6ee3ee46144152dd3ed2939ab2cfaca9c0 --- /dev/null +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -0,0 +1,352 @@ +use agent_client_protocol as acp; +use anyhow::Result; +use collections::FxHashSet; +use gpui::{App, Entity, SharedString, Task}; +use language::Buffer; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use std::sync::Arc; + +use crate::{AgentTool, ToolCallEventStream}; + +/// Discards unsaved changes in open buffers by reloading file contents from disk. +/// +/// Use this tool when: +/// - You attempted to edit files but they have unsaved changes the user does not want to keep. +/// - You want to reset files to the on-disk state before retrying an edit. +/// +/// Only use this tool after asking the user for permission, because it will discard unsaved changes. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct RestoreFileFromDiskToolInput { + /// The paths of the files to restore from disk. 
+ pub paths: Vec, +} + +pub struct RestoreFileFromDiskTool { + project: Entity, +} + +impl RestoreFileFromDiskTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for RestoreFileFromDiskTool { + type Input = RestoreFileFromDiskToolInput; + type Output = String; + + fn name() -> &'static str { + "restore_file_from_disk" + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + match input { + Ok(input) if input.paths.len() == 1 => "Restore file from disk".into(), + Ok(input) => format!("Restore {} files from disk", input.paths.len()).into(), + Err(_) => "Restore files from disk".into(), + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + let input_paths = input.paths; + + cx.spawn(async move |cx| { + let mut buffers_to_reload: FxHashSet> = FxHashSet::default(); + + let mut restored_paths: Vec = Vec::new(); + let mut clean_paths: Vec = Vec::new(); + let mut not_found_paths: Vec = Vec::new(); + let mut open_errors: Vec<(PathBuf, String)> = Vec::new(); + let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); + let mut reload_errors: Vec = Vec::new(); + + for path in input_paths { + let project_path = + project.read_with(cx, |project, cx| project.find_project_path(&path, cx)); + + let project_path = match project_path { + Ok(Some(project_path)) => project_path, + Ok(None) => { + not_found_paths.push(path); + continue; + } + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }; + + let open_buffer_task = + project.update(cx, |project, cx| project.open_buffer(project_path, cx)); + + let buffer = match open_buffer_task { + Ok(task) => match task.await { + Ok(buffer) => buffer, + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }, + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }; + + let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) { + Ok(is_dirty) => is_dirty, + Err(error) => { + dirty_check_errors.push((path, error.to_string())); + continue; + } + }; + + if is_dirty { + buffers_to_reload.insert(buffer); + restored_paths.push(path); + } else { + clean_paths.push(path); + } + } + + if !buffers_to_reload.is_empty() { + let reload_task = project.update(cx, |project, cx| { + project.reload_buffers(buffers_to_reload, true, cx) + }); + + match reload_task { + Ok(task) => { + if let Err(error) = task.await { + reload_errors.push(error.to_string()); + } + } + Err(error) => { + reload_errors.push(error.to_string()); + } + } + } + + let mut lines: Vec = Vec::new(); + + if !restored_paths.is_empty() { + lines.push(format!("Restored {} file(s).", restored_paths.len())); + } + if !clean_paths.is_empty() { + lines.push(format!("{} clean.", clean_paths.len())); + } + + if !not_found_paths.is_empty() { + lines.push(format!("Not found ({}):", not_found_paths.len())); + for path in ¬_found_paths { + lines.push(format!("- {}", path.display())); + } + } + if !open_errors.is_empty() { + lines.push(format!("Open failed ({}):", open_errors.len())); + for (path, error) in &open_errors { + lines.push(format!("- {}: {}", path.display(), error)); + } + } + if !dirty_check_errors.is_empty() { + lines.push(format!( + "Dirty check failed ({}):", + dirty_check_errors.len() + )); + for (path, error) in &dirty_check_errors { + lines.push(format!("- {}: {}", 
path.display(), error)); + } + } + if !reload_errors.is_empty() { + lines.push(format!("Reload failed ({}):", reload_errors.len())); + for error in &reload_errors { + lines.push(format!("- {}", error)); + } + } + + if lines.is_empty() { + Ok("No paths provided.".to_string()) + } else { + Ok(lines.join("\n")) + } + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use fs::Fs; + use gpui::TestAppContext; + use language::LineEnding; + use project::FakeFs; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_restore_file_from_disk_output_and_effects(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dirty.txt": "on disk: dirty\n", + "clean.txt": "on disk: clean\n", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let tool = Arc::new(RestoreFileFromDiskTool::new(project.clone())); + + // Make dirty.txt dirty in-memory by saving different content into the buffer without saving to disk. + let dirty_project_path = project.read_with(cx, |project, cx| { + project + .find_project_path("root/dirty.txt", cx) + .expect("dirty.txt should exist in project") + }); + + let dirty_buffer = project + .update(cx, |project, cx| { + project.open_buffer(dirty_project_path, cx) + }) + .await + .unwrap(); + dirty_buffer.update(cx, |buffer, cx| { + buffer.edit([(0..buffer.len(), "in memory: dirty\n")], None, cx); + }); + assert!( + dirty_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "dirty.txt buffer should be dirty before restore" + ); + + // Ensure clean.txt is opened but remains clean. + let clean_project_path = project.read_with(cx, |project, cx| { + project + .find_project_path("root/clean.txt", cx) + .expect("clean.txt should exist in project") + }); + + let clean_buffer = project + .update(cx, |project, cx| { + project.open_buffer(clean_project_path, cx) + }) + .await + .unwrap(); + assert!( + !clean_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "clean.txt buffer should start clean" + ); + + let output = cx + .update(|cx| { + tool.clone().run( + RestoreFileFromDiskToolInput { + paths: vec![ + PathBuf::from("root/dirty.txt"), + PathBuf::from("root/clean.txt"), + ], + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Output should mention restored + clean. + assert!( + output.contains("Restored 1 file(s)."), + "expected restored count line, got:\n{output}" + ); + assert!( + output.contains("1 clean."), + "expected clean count line, got:\n{output}" + ); + + // Effect: dirty buffer should be restored back to disk content and become clean. + let dirty_text = dirty_buffer.read_with(cx, |buffer, _| buffer.text()); + assert_eq!( + dirty_text, "on disk: dirty\n", + "dirty.txt buffer should be restored to disk contents" + ); + assert!( + !dirty_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "dirty.txt buffer should not be dirty after restore" + ); + + // Disk contents should be unchanged (restore-from-disk should not write). + let disk_dirty = fs.load(path!("/root/dirty.txt").as_ref()).await.unwrap(); + assert_eq!(disk_dirty, "on disk: dirty\n"); + + // Sanity: clean buffer should remain clean and unchanged. 
+ let clean_text = clean_buffer.read_with(cx, |buffer, _| buffer.text()); + assert_eq!(clean_text, "on disk: clean\n"); + assert!( + !clean_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "clean.txt buffer should remain clean" + ); + + // Test empty paths case. + let output = cx + .update(|cx| { + tool.clone().run( + RestoreFileFromDiskToolInput { paths: vec![] }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + assert_eq!(output, "No paths provided."); + + // Test not-found path case (path outside the project root). + let output = cx + .update(|cx| { + tool.clone().run( + RestoreFileFromDiskToolInput { + paths: vec![PathBuf::from("nonexistent/path.txt")], + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + assert!( + output.contains("Not found (1):"), + "expected not-found header line, got:\n{output}" + ); + assert!( + output.contains("- nonexistent/path.txt"), + "expected not-found path bullet, got:\n{output}" + ); + + let _ = LineEnding::Unix; // keep import used if the buffer edit API changes + } +} diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..429352200109c52303c9f6f94a28a49136af1a61 --- /dev/null +++ b/crates/agent/src/tools/save_file_tool.rs @@ -0,0 +1,351 @@ +use agent_client_protocol as acp; +use anyhow::Result; +use collections::FxHashSet; +use gpui::{App, Entity, SharedString, Task}; +use language::Buffer; +use project::Project; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use std::sync::Arc; + +use crate::{AgentTool, ToolCallEventStream}; + +/// Saves files that have unsaved changes. +/// +/// Use this tool when you need to edit files but they have unsaved changes that must be saved first. +/// Only use this tool after asking the user for permission to save their unsaved changes. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct SaveFileToolInput { + /// The paths of the files to save. 
+ pub paths: Vec, +} + +pub struct SaveFileTool { + project: Entity, +} + +impl SaveFileTool { + pub fn new(project: Entity) -> Self { + Self { project } + } +} + +impl AgentTool for SaveFileTool { + type Input = SaveFileToolInput; + type Output = String; + + fn name() -> &'static str { + "save_file" + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + match input { + Ok(input) if input.paths.len() == 1 => "Save file".into(), + Ok(input) => format!("Save {} files", input.paths.len()).into(), + Err(_) => "Save files".into(), + } + } + + fn run( + self: Arc, + input: Self::Input, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let project = self.project.clone(); + let input_paths = input.paths; + + cx.spawn(async move |cx| { + let mut buffers_to_save: FxHashSet> = FxHashSet::default(); + + let mut saved_paths: Vec = Vec::new(); + let mut clean_paths: Vec = Vec::new(); + let mut not_found_paths: Vec = Vec::new(); + let mut open_errors: Vec<(PathBuf, String)> = Vec::new(); + let mut dirty_check_errors: Vec<(PathBuf, String)> = Vec::new(); + let mut save_errors: Vec<(String, String)> = Vec::new(); + + for path in input_paths { + let project_path = + project.read_with(cx, |project, cx| project.find_project_path(&path, cx)); + + let project_path = match project_path { + Ok(Some(project_path)) => project_path, + Ok(None) => { + not_found_paths.push(path); + continue; + } + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }; + + let open_buffer_task = + project.update(cx, |project, cx| project.open_buffer(project_path, cx)); + + let buffer = match open_buffer_task { + Ok(task) => match task.await { + Ok(buffer) => buffer, + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }, + Err(error) => { + open_errors.push((path, error.to_string())); + continue; + } + }; + + let is_dirty = match buffer.read_with(cx, |buffer, _| buffer.is_dirty()) { + Ok(is_dirty) => is_dirty, + Err(error) => { + dirty_check_errors.push((path, error.to_string())); + continue; + } + }; + + if is_dirty { + buffers_to_save.insert(buffer); + saved_paths.push(path); + } else { + clean_paths.push(path); + } + } + + // Save each buffer individually since there's no batch save API. 
+ for buffer in buffers_to_save { + let path_for_buffer = match buffer.read_with(cx, |buffer, _| { + buffer + .file() + .map(|file| file.path().to_rel_path_buf()) + .map(|path| path.as_rel_path().as_unix_str().to_owned()) + }) { + Ok(path) => path.unwrap_or_else(|| "".to_string()), + Err(error) => { + save_errors.push(("".to_string(), error.to_string())); + continue; + } + }; + + let save_task = project.update(cx, |project, cx| project.save_buffer(buffer, cx)); + + match save_task { + Ok(task) => { + if let Err(error) = task.await { + save_errors.push((path_for_buffer, error.to_string())); + } + } + Err(error) => { + save_errors.push((path_for_buffer, error.to_string())); + } + } + } + + let mut lines: Vec = Vec::new(); + + if !saved_paths.is_empty() { + lines.push(format!("Saved {} file(s).", saved_paths.len())); + } + if !clean_paths.is_empty() { + lines.push(format!("{} clean.", clean_paths.len())); + } + + if !not_found_paths.is_empty() { + lines.push(format!("Not found ({}):", not_found_paths.len())); + for path in ¬_found_paths { + lines.push(format!("- {}", path.display())); + } + } + if !open_errors.is_empty() { + lines.push(format!("Open failed ({}):", open_errors.len())); + for (path, error) in &open_errors { + lines.push(format!("- {}: {}", path.display(), error)); + } + } + if !dirty_check_errors.is_empty() { + lines.push(format!( + "Dirty check failed ({}):", + dirty_check_errors.len() + )); + for (path, error) in &dirty_check_errors { + lines.push(format!("- {}: {}", path.display(), error)); + } + } + if !save_errors.is_empty() { + lines.push(format!("Save failed ({}):", save_errors.len())); + for (path, error) in &save_errors { + lines.push(format!("- {}: {}", path, error)); + } + } + + if lines.is_empty() { + Ok("No paths provided.".to_string()) + } else { + Ok(lines.join("\n")) + } + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use fs::Fs; + use gpui::TestAppContext; + use project::FakeFs; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_save_file_output_and_effects(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dirty.txt": "on disk: dirty\n", + "clean.txt": "on disk: clean\n", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let tool = Arc::new(SaveFileTool::new(project.clone())); + + // Make dirty.txt dirty in-memory. + let dirty_project_path = project.read_with(cx, |project, cx| { + project + .find_project_path("root/dirty.txt", cx) + .expect("dirty.txt should exist in project") + }); + + let dirty_buffer = project + .update(cx, |project, cx| { + project.open_buffer(dirty_project_path, cx) + }) + .await + .unwrap(); + dirty_buffer.update(cx, |buffer, cx| { + buffer.edit([(0..buffer.len(), "in memory: dirty\n")], None, cx); + }); + assert!( + dirty_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "dirty.txt buffer should be dirty before save" + ); + + // Ensure clean.txt is opened but remains clean. 
+ let clean_project_path = project.read_with(cx, |project, cx| { + project + .find_project_path("root/clean.txt", cx) + .expect("clean.txt should exist in project") + }); + + let clean_buffer = project + .update(cx, |project, cx| { + project.open_buffer(clean_project_path, cx) + }) + .await + .unwrap(); + assert!( + !clean_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "clean.txt buffer should start clean" + ); + + let output = cx + .update(|cx| { + tool.clone().run( + SaveFileToolInput { + paths: vec![ + PathBuf::from("root/dirty.txt"), + PathBuf::from("root/clean.txt"), + ], + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Output should mention saved + clean. + assert!( + output.contains("Saved 1 file(s)."), + "expected saved count line, got:\n{output}" + ); + assert!( + output.contains("1 clean."), + "expected clean count line, got:\n{output}" + ); + + // Effect: dirty buffer should now be clean and disk should have new content. + assert!( + !dirty_buffer.read_with(cx, |buffer, _| buffer.is_dirty()), + "dirty.txt buffer should not be dirty after save" + ); + + let disk_dirty = fs.load(path!("/root/dirty.txt").as_ref()).await.unwrap(); + assert_eq!( + disk_dirty, "in memory: dirty\n", + "dirty.txt disk content should be updated" + ); + + // Sanity: clean buffer should remain clean and disk unchanged. + let disk_clean = fs.load(path!("/root/clean.txt").as_ref()).await.unwrap(); + assert_eq!(disk_clean, "on disk: clean\n"); + + // Test empty paths case. + let output = cx + .update(|cx| { + tool.clone().run( + SaveFileToolInput { paths: vec![] }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + assert_eq!(output, "No paths provided."); + + // Test not-found path case. + let output = cx + .update(|cx| { + tool.clone().run( + SaveFileToolInput { + paths: vec![PathBuf::from("nonexistent/path.txt")], + }, + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + assert!( + output.contains("Not found (1):"), + "expected not-found header line, got:\n{output}" + ); + assert!( + output.contains("- nonexistent/path.txt"), + "expected not-found path bullet, got:\n{output}" + ); + } +} diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 6d30c19152001deaef5deeacbdf266e28ac03d08..f3302fb1894612287bf04acfbfa301188bf853fb 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -1,6 +1,7 @@ use agent_client_protocol as acp; use anyhow::Result; -use gpui::{App, Entity, SharedString, Task}; +use futures::FutureExt as _; +use gpui::{App, AppContext, Entity, SharedString, Task}; use project::Project; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -8,6 +9,7 @@ use std::{ path::{Path, PathBuf}, rc::Rc, sync::Arc, + time::Duration, }; use util::markdown::MarkdownInlineCode; @@ -25,13 +27,17 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// /// Do not use this tool for commands that run indefinitely, such as servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers that don't terminate on their own. /// +/// For potentially long-running commands, prefer specifying `timeout_ms` to bound runtime and prevent indefinite hangs. +/// /// Remember that each invocation of this tool will spawn a new shell process, so you can't rely on any state from previous invocations. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct TerminalToolInput { /// The one-liner command to execute. 
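The `timeout_ms` field added to this input just below bounds the command's runtime: `run` races the terminal's exit future against a `smol::Timer` and kills the task if the timer fires first. A minimal standalone sketch of that race, assuming only the `futures` and `smol` crates (both already used by this file); the `exit` future stands in for `wait_for_exit`:

```rust
use std::time::Duration;

use futures::{FutureExt as _, pin_mut, select};

fn main() {
    smol::block_on(async {
        // Stand-in for the terminal's `wait_for_exit` future.
        let exit = async {
            smol::Timer::after(Duration::from_secs(60)).await;
            0u32
        }
        .fuse();
        // Stand-in for the `timeout_ms`-driven timer task.
        let timeout = async {
            smol::Timer::after(Duration::from_millis(100)).await;
        }
        .fuse();
        pin_mut!(exit, timeout);

        select! {
            code = exit => println!("command exited with {code}"),
            _ = timeout => {
                // The real tool kills the terminal task at this point and then
                // awaits the exit status of the killed process.
                println!("timed out; killing the task");
            }
        }
    });
}
```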
- command: String, + pub command: String, /// Working directory for the command. This must be one of the root directories of the project. - cd: String, + pub cd: String, + /// Optional maximum runtime (in milliseconds). If exceeded, the running terminal task is killed. + pub timeout_ms: Option, } pub struct TerminalTool { @@ -112,12 +118,30 @@ impl AgentTool for TerminalTool { .await?; let terminal_id = terminal.id(cx)?; - event_stream.update_fields(acp::ToolCallUpdateFields { - content: Some(vec![acp::ToolCallContent::Terminal { terminal_id }]), - ..Default::default() - }); + event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![ + acp::ToolCallContent::Terminal(acp::Terminal::new(terminal_id)), + ])); + + let timeout = input.timeout_ms.map(Duration::from_millis); + + let exit_status = match timeout { + Some(timeout) => { + let wait_for_exit = terminal.wait_for_exit(cx)?; + let timeout_task = cx.background_spawn(async move { + smol::Timer::after(timeout).await; + }); + + futures::select! { + status = wait_for_exit.clone().fuse() => status, + _ = timeout_task.fuse() => { + terminal.kill(cx)?; + wait_for_exit.await + } + } + } + None => terminal.wait_for_exit(cx)?.await, + }; - let exit_status = terminal.wait_for_exit(cx)?.await; let output = terminal.current_output(cx)?; Ok(process_content(output, &input.command, exit_status)) diff --git a/crates/agent/src/tools/thinking_tool.rs b/crates/agent/src/tools/thinking_tool.rs index 0a68f7545f81ce3202c110b1435d33b57adf409c..96024326f6f1610f500972b1a98be45258e3966b 100644 --- a/crates/agent/src/tools/thinking_tool.rs +++ b/crates/agent/src/tools/thinking_tool.rs @@ -43,10 +43,8 @@ impl AgentTool for ThinkingTool { event_stream: ToolCallEventStream, _cx: &mut App, ) -> Task> { - event_stream.update_fields(acp::ToolCallUpdateFields { - content: Some(vec![input.content.into()]), - ..Default::default() - }); + event_stream + .update_fields(acp::ToolCallUpdateFields::new().content(vec![input.content.into()])); Task::ready(Ok("Finished thinking.".to_string())) } } diff --git a/crates/agent/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs index 03e9db6601579e082e4d83de50f1999209d9f197..eb4ebacea2a8e48d6efa9032f46b336ca30c39b6 100644 --- a/crates/agent/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -76,10 +76,8 @@ impl AgentTool for WebSearchTool { let response = match search_task.await { Ok(response) => response, Err(err) => { - event_stream.update_fields(acp::ToolCallUpdateFields { - title: Some("Web Search Failed".to_string()), - ..Default::default() - }); + event_stream + .update_fields(acp::ToolCallUpdateFields::new().title("Web Search Failed")); return Err(err); } }; @@ -107,26 +105,23 @@ fn emit_update(response: &WebSearchResponse, event_stream: &ToolCallEventStream) } else { format!("{} results", response.results.len()) }; - event_stream.update_fields(acp::ToolCallUpdateFields { - title: Some(format!("Searched the web: {result_text}")), - content: Some( - response - .results - .iter() - .map(|result| acp::ToolCallContent::Content { - content: acp::ContentBlock::ResourceLink(acp::ResourceLink { - name: result.title.clone(), - uri: result.url.clone(), - title: Some(result.title.clone()), - description: Some(result.text.clone()), - mime_type: None, - annotations: None, - size: None, - meta: None, - }), - }) - .collect(), - ), - ..Default::default() - }); + event_stream.update_fields( + acp::ToolCallUpdateFields::new() + .title(format!("Searched the web: {result_text}")) + 
.content( + response + .results + .iter() + .map(|result| { + acp::ToolCallContent::Content(acp::Content::new( + acp::ContentBlock::ResourceLink( + acp::ResourceLink::new(result.title.clone(), result.url.clone()) + .title(result.title.clone()) + .description(result.text.clone()), + ), + )) + }) + .collect::>(), + ), + ); } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 15f56bf2ed4ee100fd22dc0d7df73f2e8a3274ea..e99855fe8a7241468e93f01fe6c7b6fee161f600 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -9,6 +9,8 @@ use futures::io::BufReader; use project::Project; use project::agent_server_store::AgentServerCommand; use serde::Deserialize; +use settings::Settings as _; +use task::ShellBuilder; use util::ResultExt as _; use std::path::PathBuf; @@ -21,7 +23,7 @@ use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntit use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent}; use terminal::TerminalBuilder; -use terminal::terminal_settings::{AlternateScroll, CursorShape}; +use terminal::terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; #[derive(Debug, Error)] #[error("Unsupported version")] @@ -29,12 +31,13 @@ pub struct UnsupportedVersion; pub struct AcpConnection { server_name: SharedString, - telemetry_id: &'static str, + telemetry_id: SharedString, connection: Rc, sessions: Rc>>, auth_methods: Vec, agent_capabilities: acp::AgentCapabilities, default_mode: Option, + default_model: Option, root_dir: PathBuf, // NB: Don't move this into the wait_task, since we need to ensure the process is // killed on drop (setting kill_on_drop on the command seems to not always work). @@ -53,19 +56,19 @@ pub struct AcpSession { pub async fn connect( server_name: SharedString, - telemetry_id: &'static str, command: AgentServerCommand, root_dir: &Path, default_mode: Option, + default_model: Option, is_remote: bool, cx: &mut AsyncApp, ) -> Result> { let conn = AcpConnection::stdio( server_name, - telemetry_id, command.clone(), root_dir, default_mode, + default_model, is_remote, cx, ) @@ -73,21 +76,23 @@ pub async fn connect( Ok(Rc::new(conn) as _) } -const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1; +const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1; impl AcpConnection { pub async fn stdio( server_name: SharedString, - telemetry_id: &'static str, command: AgentServerCommand, root_dir: &Path, default_mode: Option, + default_model: Option, is_remote: bool, cx: &mut AsyncApp, ) -> Result { - let mut child = util::command::new_smol_command(&command.path); + let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?; + let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive(); + let mut child = + builder.build_command(Some(command.path.display().to_string()), &command.args); child - .args(command.args.iter().map(|arg| arg.as_str())) .envs(command.env.iter().flatten()) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) @@ -170,34 +175,38 @@ impl AcpConnection { })?; let response = connection - .initialize(acp::InitializeRequest { - protocol_version: acp::VERSION, - client_capabilities: acp::ClientCapabilities { - fs: acp::FileSystemCapability { - read_text_file: true, - write_text_file: true, - meta: None, - }, - terminal: true, - meta: Some(serde_json::json!({ - // Experimental: Allow for rendering terminal output from the agents - "terminal_output": true, - "terminal-auth": true, - 
})), - }, - client_info: Some(acp::Implementation { - name: "zed".to_owned(), - title: release_channel.map(|c| c.to_owned()), - version, - }), - meta: None, - }) + .initialize( + acp::InitializeRequest::new(acp::ProtocolVersion::V1) + .client_capabilities( + acp::ClientCapabilities::new() + .fs(acp::FileSystemCapability::new() + .read_text_file(true) + .write_text_file(true)) + .terminal(true) + // Experimental: Allow for rendering terminal output from the agents + .meta(acp::Meta::from_iter([ + ("terminal_output".into(), true.into()), + ("terminal-auth".into(), true.into()), + ])), + ) + .client_info( + acp::Implementation::new("zed", version) + .title(release_channel.map(ToOwned::to_owned)), + ), + ) .await?; if response.protocol_version < MINIMUM_SUPPORTED_VERSION { return Err(UnsupportedVersion.into()); } + let telemetry_id = response + .agent_info + // Use the one the agent provides if we have one + .map(|info| info.name.into()) + // Otherwise, just use the name + .unwrap_or_else(|| server_name.clone()); + Ok(Self { auth_methods: response.auth_methods, root_dir: root_dir.to_owned(), @@ -207,6 +216,7 @@ impl AcpConnection { sessions, agent_capabilities: response.agent_capabilities, default_mode, + default_model, _io_task: io_task, _wait_task: wait_task, _stderr_task: stderr_task, @@ -231,8 +241,8 @@ impl Drop for AcpConnection { } impl AgentConnection for AcpConnection { - fn telemetry_id(&self) -> &'static str { - self.telemetry_id + fn telemetry_id(&self) -> SharedString { + self.telemetry_id.clone() } fn new_thread( @@ -245,6 +255,7 @@ impl AgentConnection for AcpConnection { let conn = self.connection.clone(); let sessions = self.sessions.clone(); let default_mode = self.default_mode.clone(); + let default_model = self.default_model.clone(); let cwd = cwd.to_path_buf(); let context_server_store = project.read(cx).context_server_store().read(cx); let mcp_servers = if project.read(cx).is_local() { @@ -253,23 +264,37 @@ impl AgentConnection for AcpConnection { .iter() .filter_map(|id| { let configuration = context_server_store.configuration_for_server(id)?; - let command = configuration.command(); - Some(acp::McpServer::Stdio { - name: id.0.to_string(), - command: command.path.clone(), - args: command.args.clone(), - env: if let Some(env) = command.env.as_ref() { - env.iter() - .map(|(name, value)| acp::EnvVariable { - name: name.clone(), - value: value.clone(), - meta: None, - }) - .collect() - } else { - vec![] - }, - }) + match &*configuration { + project::context_server_store::ContextServerConfiguration::Custom { + command, + .. + } + | project::context_server_store::ContextServerConfiguration::Extension { + command, + .. 
+ } => Some(acp::McpServer::Stdio( + acp::McpServerStdio::new(id.0.to_string(), &command.path) + .args(command.args.clone()) + .env(if let Some(env) = command.env.as_ref() { + env.iter() + .map(|(name, value)| acp::EnvVariable::new(name, value)) + .collect() + } else { + vec![] + }), + )), + project::context_server_store::ContextServerConfiguration::Http { + url, + headers, + } => Some(acp::McpServer::Http( + acp::McpServerHttp::new(id.0.to_string(), url.to_string()).headers( + headers + .iter() + .map(|(name, value)| acp::HttpHeader::new(name, value)) + .collect(), + ), + )), + } }) .collect() } else { @@ -281,13 +306,13 @@ impl AgentConnection for AcpConnection { cx.spawn(async move |cx| { let response = conn - .new_session(acp::NewSessionRequest { mcp_servers, cwd, meta: None }) + .new_session(acp::NewSessionRequest::new(cwd).mcp_servers(mcp_servers)) .await .map_err(|err| { - if err.code == acp::ErrorCode::AUTH_REQUIRED.code { + if err.code == acp::ErrorCode::AuthRequired { let mut error = AuthRequired::new(); - if err.message != acp::ErrorCode::AUTH_REQUIRED.message { + if err.message != acp::ErrorCode::AuthRequired.to_string() { error = error.with_description(err.message); } @@ -312,12 +337,9 @@ impl AgentConnection for AcpConnection { let default_mode = default_mode.clone(); let session_id = response.session_id.clone(); let modes = modes.clone(); + let conn = conn.clone(); async move |_| { - let result = conn.set_session_mode(acp::SetSessionModeRequest { - session_id, - mode_id: default_mode, - meta: None, - }) + let result = conn.set_session_mode(acp::SetSessionModeRequest::new(session_id, default_mode)) .await.log_err(); if result.is_none() { @@ -346,6 +368,49 @@ impl AgentConnection for AcpConnection { } } + if let Some(default_model) = default_model { + if let Some(models) = models.as_ref() { + let mut models_ref = models.borrow_mut(); + let has_model = models_ref.available_models.iter().any(|model| model.model_id == default_model); + + if has_model { + let initial_model_id = models_ref.current_model_id.clone(); + + cx.spawn({ + let default_model = default_model.clone(); + let session_id = response.session_id.clone(); + let models = models.clone(); + let conn = conn.clone(); + async move |_| { + let result = conn.set_session_model(acp::SetSessionModelRequest::new(session_id, default_model)) + .await.log_err(); + + if result.is_none() { + models.borrow_mut().current_model_id = initial_model_id; + } + } + }).detach(); + + models_ref.current_model_id = default_model; + } else { + let available_models = models_ref + .available_models + .iter() + .map(|model| format!("- `{}`: {}", model.model_id, model.name)) + .collect::>() + .join("\n"); + + log::warn!( + "`{default_model}` is not a valid {name} model. 
Available options:\n{available_models}", + ); + } + } else { + log::warn!( + "`{name}` does not support model selection, but `default_model` was set in settings.", + ); + } + } + let session_id = response.session_id; let action_log = cx.new(|_| ActionLog::new(project.clone()))?; let thread = cx.new(|cx| { @@ -381,12 +446,8 @@ impl AgentConnection for AcpConnection { fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task> { let conn = self.connection.clone(); cx.foreground_executor().spawn(async move { - conn.authenticate(acp::AuthenticateRequest { - method_id: method_id.clone(), - meta: None, - }) - .await?; - + conn.authenticate(acp::AuthenticateRequest::new(method_id)) + .await?; Ok(()) }) } @@ -413,11 +474,11 @@ impl AgentConnection for AcpConnection { match result { Ok(response) => Ok(response), Err(err) => { - if err.code == acp::ErrorCode::AUTH_REQUIRED.code { + if err.code == acp::ErrorCode::AuthRequired { return Err(anyhow!(acp::Error::auth_required())); } - if err.code != ErrorCode::INTERNAL_ERROR.code { + if err.code != ErrorCode::InternalError { anyhow::bail!(err) } @@ -440,10 +501,7 @@ impl AgentConnection for AcpConnection { && (details.contains("This operation was aborted") || details.contains("The user aborted a request")) { - Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Cancelled, - meta: None, - }) + Ok(acp::PromptResponse::new(acp::StopReason::Cancelled)) } else { Err(anyhow!(details)) } @@ -460,10 +518,7 @@ impl AgentConnection for AcpConnection { session.suppress_abort_err = true; } let conn = self.connection.clone(); - let params = acp::CancelNotification { - session_id: session_id.clone(), - meta: None, - }; + let params = acp::CancelNotification::new(session_id.clone()); cx.foreground_executor() .spawn(async move { conn.cancel(params).await }) .detach(); @@ -544,11 +599,7 @@ impl acp_thread::AgentSessionModes for AcpSessionModes { let state = self.state.clone(); cx.foreground_executor().spawn(async move { let result = connection - .set_session_mode(acp::SetSessionModeRequest { - session_id, - mode_id, - meta: None, - }) + .set_session_mode(acp::SetSessionModeRequest::new(session_id, mode_id)) .await; if result.is_err() { @@ -607,11 +658,7 @@ impl acp_thread::AgentModelSelector for AcpModelSelector { let state = self.state.clone(); cx.foreground_executor().spawn(async move { let result = connection - .set_session_model(acp::SetSessionModelRequest { - session_id, - model_id, - meta: None, - }) + .set_session_model(acp::SetSessionModelRequest::new(session_id, model_id)) .await; if result.is_err() { @@ -673,10 +720,7 @@ impl acp::Client for ClientDelegate { let outcome = task.await; - Ok(acp::RequestPermissionResponse { - outcome, - meta: None, - }) + Ok(acp::RequestPermissionResponse::new(outcome)) } async fn write_text_file( @@ -708,10 +752,7 @@ impl acp::Client for ClientDelegate { let content = task.await?; - Ok(acp::ReadTextFileResponse { - content, - meta: None, - }) + Ok(acp::ReadTextFileResponse::new(content)) } async fn session_notification( @@ -746,7 +787,7 @@ impl acp::Client for ClientDelegate { if let Some(terminal_info) = meta.get("terminal_info") { if let Some(id_str) = terminal_info.get("terminal_id").and_then(|v| v.as_str()) { - let terminal_id = acp::TerminalId(id_str.into()); + let terminal_id = acp::TerminalId::new(id_str); let cwd = terminal_info .get("cwd") .and_then(|v| v.as_str().map(PathBuf::from)); @@ -762,7 +803,7 @@ impl acp::Client for ClientDelegate { let lower = cx.new(|cx| builder.subscribe(cx)); 
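The surrounding handlers pull these experimental terminal payloads out of the notification's `meta` object by key. A small sketch of the shapes being read here, assuming only `serde_json`; the identifiers and values are illustrative:

```rust
fn main() {
    // Key names mirror the lookups in `session_notification`: `terminal_info`
    // on tool-call creation, then `terminal_output` and `terminal_exit` on
    // subsequent updates.
    let meta = serde_json::json!({
        "terminal_info": { "terminal_id": "term-1", "cwd": "/project" },
        "terminal_output": { "terminal_id": "term-1", "data": "hello\n" },
        "terminal_exit": { "terminal_id": "term-1", "exit_code": 0, "signal": null }
    });

    if let Some(info) = meta.get("terminal_info") {
        let id = info.get("terminal_id").and_then(|v| v.as_str()).unwrap_or_default();
        let cwd = info.get("cwd").and_then(|v| v.as_str());
        println!("terminal {id} created in {cwd:?}");
    }
    if let Some(out) = meta.get("terminal_output") {
        let data = out.get("data").and_then(|v| v.as_str()).unwrap_or_default();
        println!("output chunk: {} bytes", data.len());
    }
    if let Some(exit) = meta.get("terminal_exit") {
        let exit_code = exit.get("exit_code").and_then(|v| v.as_u64()).map(|i| i as u32);
        let signal = exit.get("signal").and_then(|v| v.as_str());
        println!("exit code {exit_code:?}, signal {signal:?}");
    }
}
```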
thread.on_terminal_provider_event( TerminalProviderEvent::Created { - terminal_id: terminal_id.clone(), + terminal_id, label: tc.title.clone(), cwd, output_byte_limit: None, @@ -787,15 +828,12 @@ impl acp::Client for ClientDelegate { if let Some(meta) = &tcu.meta { if let Some(term_out) = meta.get("terminal_output") { if let Some(id_str) = term_out.get("terminal_id").and_then(|v| v.as_str()) { - let terminal_id = acp::TerminalId(id_str.into()); + let terminal_id = acp::TerminalId::new(id_str); if let Some(s) = term_out.get("data").and_then(|v| v.as_str()) { let data = s.as_bytes().to_vec(); let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| { thread.on_terminal_provider_event( - TerminalProviderEvent::Output { - terminal_id: terminal_id.clone(), - data, - }, + TerminalProviderEvent::Output { terminal_id, data }, cx, ); }); @@ -806,21 +844,24 @@ impl acp::Client for ClientDelegate { // terminal_exit if let Some(term_exit) = meta.get("terminal_exit") { if let Some(id_str) = term_exit.get("terminal_id").and_then(|v| v.as_str()) { - let terminal_id = acp::TerminalId(id_str.into()); - let status = acp::TerminalExitStatus { - exit_code: term_exit - .get("exit_code") - .and_then(|v| v.as_u64()) - .map(|i| i as u32), - signal: term_exit - .get("signal") - .and_then(|v| v.as_str().map(|s| s.to_string())), - meta: None, - }; + let terminal_id = acp::TerminalId::new(id_str); + let status = acp::TerminalExitStatus::new() + .exit_code( + term_exit + .get("exit_code") + .and_then(|v| v.as_u64()) + .map(|i| i as u32), + ) + .signal( + term_exit + .get("signal") + .and_then(|v| v.as_str().map(|s| s.to_string())), + ); + let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| { thread.on_terminal_provider_event( TerminalProviderEvent::Exit { - terminal_id: terminal_id.clone(), + terminal_id, status, }, cx, @@ -857,7 +898,7 @@ impl acp::Client for ClientDelegate { // Register with renderer let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| { thread.register_terminal_created( - acp::TerminalId(uuid::Uuid::new_v4().to_string().into()), + acp::TerminalId::new(uuid::Uuid::new_v4().to_string()), format!("{} {}", args.command, args.args.join(" ")), args.cwd.clone(), args.output_byte_limit, @@ -867,10 +908,7 @@ impl acp::Client for ClientDelegate { })?; let terminal_id = terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?; - Ok(acp::CreateTerminalResponse { - terminal_id, - meta: None, - }) + Ok(acp::CreateTerminalResponse::new(terminal_id)) } async fn kill_terminal_command( @@ -931,10 +969,7 @@ impl acp::Client for ClientDelegate { })?? 
.await; - Ok(acp::WaitForTerminalExitResponse { - exit_status, - meta: None, - }) + Ok(acp::WaitForTerminalExitResponse::new(exit_status)) } } diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index b44c2123fb5052e2487464d813936cd1edf9821a..46e8508e44f07e4fb3d613e30387d5afd3f38423 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -56,7 +56,6 @@ impl AgentServerDelegate { pub trait AgentServer: Send { fn logo(&self) -> ui::IconName; fn name(&self) -> SharedString; - fn telemetry_id(&self) -> &'static str; fn default_mode(&self, _cx: &mut App) -> Option { None } @@ -68,6 +67,18 @@ pub trait AgentServer: Send { ) { } + fn default_model(&self, _cx: &mut App) -> Option { + None + } + + fn set_default_model( + &self, + _model_id: Option, + _fs: Arc, + _cx: &mut App, + ) { + } + fn connect( &self, root_dir: Option<&Path>, diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index cd3207824a7c05ddfaafeca965deea0918ccfb39..e67ddd5c0698758fdec7c7796b26a1351e9990e5 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -22,10 +22,6 @@ pub struct AgentServerLoginCommand { } impl AgentServer for ClaudeCode { - fn telemetry_id(&self) -> &'static str { - "claude-code" - } - fn name(&self) -> SharedString { "Claude Code".into() } @@ -41,7 +37,7 @@ impl AgentServer for ClaudeCode { settings .as_ref() - .and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into()))) + .and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new)) } fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { @@ -55,6 +51,27 @@ impl AgentServer for ClaudeCode { }); } + fn default_model(&self, cx: &mut App) -> Option { + let settings = cx.read_global(|settings: &SettingsStore, _| { + settings.get::(None).claude.clone() + }); + + settings + .as_ref() + .and_then(|s| s.default_model.clone().map(acp::ModelId::new)) + } + + fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { + update_settings_file(fs, cx, |settings, _| { + settings + .agent_servers + .get_or_insert_default() + .claude + .get_or_insert_default() + .default_model = model_id.map(|m| m.to_string()) + }); + } + fn connect( &self, root_dir: Option<&Path>, @@ -62,12 +79,12 @@ impl AgentServer for ClaudeCode { cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let telemetry_id = self.telemetry_id(); let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); + let default_model = self.default_model(cx); cx.spawn(async move |cx| { let (command, root_dir, login) = store @@ -86,10 +103,10 @@ impl AgentServer for ClaudeCode { .await?; let connection = crate::acp::connect( name, - telemetry_id, command, root_dir.as_ref(), default_mode, + default_model, is_remote, cx, ) diff --git a/crates/agent_servers/src/codex.rs b/crates/agent_servers/src/codex.rs index 95375ad412c31272dbfce9262b4b5fd38fe55c50..c2b308e48b7a984b0374272c0059286e933916b3 100644 --- a/crates/agent_servers/src/codex.rs +++ b/crates/agent_servers/src/codex.rs @@ -23,10 +23,6 @@ pub(crate) mod tests { } impl AgentServer for Codex { - fn telemetry_id(&self) -> &'static str { - "codex" - } - fn name(&self) -> SharedString { "Codex".into() } @@ -42,7 +38,7 @@ impl AgentServer for Codex { 
settings .as_ref() - .and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into()))) + .and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new)) } fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { @@ -56,6 +52,27 @@ impl AgentServer for Codex { }); } + fn default_model(&self, cx: &mut App) -> Option { + let settings = cx.read_global(|settings: &SettingsStore, _| { + settings.get::(None).codex.clone() + }); + + settings + .as_ref() + .and_then(|s| s.default_model.clone().map(acp::ModelId::new)) + } + + fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { + update_settings_file(fs, cx, |settings, _| { + settings + .agent_servers + .get_or_insert_default() + .codex + .get_or_insert_default() + .default_model = model_id.map(|m| m.to_string()) + }); + } + fn connect( &self, root_dir: Option<&Path>, @@ -63,12 +80,12 @@ impl AgentServer for Codex { cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let telemetry_id = self.telemetry_id(); let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); + let default_model = self.default_model(cx); cx.spawn(async move |cx| { let (command, root_dir, login) = store @@ -88,10 +105,10 @@ impl AgentServer for Codex { let connection = crate::acp::connect( name, - telemetry_id, command, root_dir.as_ref(), default_mode, + default_model, is_remote, cx, ) diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index a51ed8a51a24d28aa6f2867797207bb15643a67d..6b981ce8b8198b275e5d9aa05b6fb66431d22e08 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -1,4 +1,4 @@ -use crate::{AgentServerDelegate, load_proxy_env}; +use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; use acp_thread::AgentConnection; use agent_client_protocol as acp; use anyhow::{Context as _, Result}; @@ -20,11 +20,7 @@ impl CustomAgentServer { } } -impl crate::AgentServer for CustomAgentServer { - fn telemetry_id(&self) -> &'static str { - "custom" - } - +impl AgentServer for CustomAgentServer { fn name(&self) -> SharedString { self.name.clone() } @@ -44,19 +40,64 @@ impl crate::AgentServer for CustomAgentServer { settings .as_ref() - .and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into()))) + .and_then(|s| s.default_mode().map(acp::SessionModeId::new)) } fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { let name = self.name(); update_settings_file(fs, cx, move |settings, _| { + let settings = settings + .agent_servers + .get_or_insert_default() + .custom + .entry(name.clone()) + .or_insert_with(|| settings::CustomAgentServerSettings::Extension { + default_model: None, + default_mode: None, + }); + + match settings { + settings::CustomAgentServerSettings::Custom { default_mode, .. } + | settings::CustomAgentServerSettings::Extension { default_mode, .. 
} => { + *default_mode = mode_id.map(|m| m.to_string()); + } + } + }); + } + + fn default_model(&self, cx: &mut App) -> Option { + let settings = cx.read_global(|settings: &SettingsStore, _| { settings + .get::(None) + .custom + .get(&self.name()) + .cloned() + }); + + settings + .as_ref() + .and_then(|s| s.default_model().map(acp::ModelId::new)) + } + + fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { + let name = self.name(); + update_settings_file(fs, cx, move |settings, _| { + let settings = settings .agent_servers .get_or_insert_default() .custom - .get_mut(&name) - .unwrap() - .default_mode = mode_id.map(|m| m.to_string()) + .entry(name.clone()) + .or_insert_with(|| settings::CustomAgentServerSettings::Extension { + default_model: None, + default_mode: None, + }); + + match settings { + settings::CustomAgentServerSettings::Custom { default_model, .. } + | settings::CustomAgentServerSettings::Extension { default_model, .. } => { + *default_model = model_id.map(|m| m.to_string()); + } + } }); } @@ -67,13 +108,12 @@ impl crate::AgentServer for CustomAgentServer { cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let telemetry_id = self.telemetry_id(); let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); let is_remote = delegate.project.read(cx).is_via_remote_server(); let default_mode = self.default_mode(cx); + let default_model = self.default_model(cx); let store = delegate.store.downgrade(); let extra_env = load_proxy_env(cx); - cx.spawn(async move |cx| { let (command, root_dir, login) = store .update(cx, |store, cx| { @@ -93,10 +133,10 @@ impl crate::AgentServer for CustomAgentServer { .await?; let connection = crate::acp::connect( name, - telemetry_id, command, root_dir.as_ref(), default_mode, + default_model, is_remote, cx, ) diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 7618625278121cc1426f06ed8626a68759f34995..9db7535b5e55d88d6856774c20365bbac46fc81e 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -82,26 +82,9 @@ where .update(cx, |thread, cx| { thread.send( vec![ - acp::ContentBlock::Text(acp::TextContent { - text: "Read the file ".into(), - annotations: None, - meta: None, - }), - acp::ContentBlock::ResourceLink(acp::ResourceLink { - uri: "foo.rs".into(), - name: "foo.rs".into(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }), - acp::ContentBlock::Text(acp::TextContent { - text: " and tell me what the content of the println! is".into(), - annotations: None, - meta: None, - }), + "Read the file ".into(), + acp::ContentBlock::ResourceLink(acp::ResourceLink::new("foo.rs", "foo.rs")), + " and tell me what the content of the println! is".into(), ], cx, ) @@ -429,7 +412,7 @@ macro_rules! 
common_e2e_tests { async fn tool_call_with_permission(cx: &mut ::gpui::TestAppContext) { $crate::e2e_tests::test_tool_call_with_permission( $server, - ::agent_client_protocol::PermissionOptionId($allow_option_id.into()), + ::agent_client_protocol::PermissionOptionId::new($allow_option_id), cx, ) .await; @@ -476,6 +459,7 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { env: None, ignore_system_version: None, default_mode: None, + default_model: None, }), gemini: Some(crate::gemini::tests::local_command().into()), codex: Some(BuiltinAgentServerSettings { @@ -484,6 +468,7 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { env: None, ignore_system_version: None, default_mode: None, + default_model: None, }), custom: collections::HashMap::default(), }, diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index feaa221cbccb789ed3a89bed9f23d544e1d3b5f7..5fea74746aec73f3ea7bb33562244e4a6eea5ba7 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -12,10 +12,6 @@ use project::agent_server_store::GEMINI_NAME; pub struct Gemini; impl AgentServer for Gemini { - fn telemetry_id(&self) -> &'static str { - "gemini-cli" - } - fn name(&self) -> SharedString { "Gemini CLI".into() } @@ -31,12 +27,12 @@ impl AgentServer for Gemini { cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let telemetry_id = self.telemetry_id(); let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let mut extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); + let default_model = self.default_model(cx); cx.spawn(async move |cx| { extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); @@ -65,10 +61,10 @@ impl AgentServer for Gemini { let connection = crate::acp::connect( name, - telemetry_id, command, root_dir.as_ref(), default_mode, + default_model, is_remote, cx, ) diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index 8ddcac24fe054d1226f2bbac49498fd35d6ed1c3..0d7163549f0a4b172773c9ac95dcbc84b7212667 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -12,6 +12,7 @@ workspace = true path = "src/agent_settings.rs" [dependencies] +agent-client-protocol.workspace = true anyhow.workspace = true cloud_llm_client.workspace = true collections.workspace = true diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 084ac7c3e7a1be4920126f857145e64b65a255dd..b513ec1a70b6f7ab02382dfa312ea2d4d6a47234 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -2,14 +2,15 @@ mod agent_profile; use std::sync::Arc; -use collections::IndexMap; +use agent_client_protocol::ModelId; +use collections::{HashSet, IndexMap}; use gpui::{App, Pixels, px}; use language_model::LanguageModel; use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ - DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection, + DefaultAgentView, DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NotifyWhenAgentWaiting, RegisterSetting, Settings, }; @@ -24,13 +25,16 @@ pub struct AgentSettings { pub enabled: bool, pub button: bool, pub dock: DockPosition, + pub agents_panel_dock: DockSide, pub default_width: Pixels, pub default_height: Pixels, pub 
default_model: Option, pub inline_assistant_model: Option, + pub inline_assistant_use_streaming_tools: bool, pub commit_message_model: Option, pub thread_summary_model: Option, pub inline_alternatives: Vec, + pub favorite_models: Vec, pub default_profile: AgentProfileId, pub default_view: DefaultAgentView, pub profiles: IndexMap, @@ -94,6 +98,13 @@ impl AgentSettings { pub fn set_message_editor_max_lines(&self) -> usize { self.message_editor_min_lines * 2 } + + pub fn favorite_model_ids(&self) -> HashSet { + self.favorite_models + .iter() + .map(|sel| ModelId::new(format!("{}/{}", sel.provider.0, sel.model))) + .collect() + } } #[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)] @@ -151,13 +162,18 @@ impl Settings for AgentSettings { enabled: agent.enabled.unwrap(), button: agent.button.unwrap(), dock: agent.dock.unwrap(), + agents_panel_dock: agent.agents_panel_dock.unwrap(), default_width: px(agent.default_width.unwrap()), default_height: px(agent.default_height.unwrap()), default_model: Some(agent.default_model.unwrap()), inline_assistant_model: agent.inline_assistant_model, + inline_assistant_use_streaming_tools: agent + .inline_assistant_use_streaming_tools + .unwrap_or(true), commit_message_model: agent.commit_message_model, thread_summary_model: agent.thread_summary_model, inline_alternatives: agent.inline_alternatives.unwrap_or_default(), + favorite_models: agent.favorite_models, default_profile: AgentProfileId(agent.default_profile.unwrap()), default_view: agent.default_view.unwrap(), profiles: agent diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 724b53a017911edbd6e9dd88c410daf794889d4e..8a9633e578a85323f2a289bd83c169a1f5d7f272 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -13,7 +13,8 @@ path = "src/agent_ui.rs" doctest = false [features] -test-support = ["gpui/test-support", "language/test-support"] +test-support = ["assistant_text_thread/test-support", "eval_utils", "gpui/test-support", "language/test-support", "reqwest_client", "workspace/test-support", "agent/test-support"] +unit-eval = [] [dependencies] acp_thread.workspace = true @@ -39,6 +40,7 @@ component.workspace = true context_server.workspace = true db.workspace = true editor.workspace = true +eval_utils = { workspace = true, optional = true } extension.workspace = true extension_host.workspace = true feature_flags.workspace = true @@ -47,6 +49,7 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true +gpui_tokio.workspace = true html_to_markdown.workspace = true http_client.workspace = true indoc.workspace = true @@ -69,7 +72,7 @@ postage.workspace = true project.workspace = true prompt_store.workspace = true proto.workspace = true -ref-cast.workspace = true +rand.workspace = true release_channel.workspace = true rope.workspace = true rules_library.workspace = true @@ -83,7 +86,6 @@ smol.workspace = true streaming_diff.workspace = true task.workspace = true telemetry.workspace = true -telemetry_events.workspace = true terminal.workspace = true terminal_view.workspace = true text.workspace = true @@ -93,19 +95,24 @@ time_format.workspace = true ui.workspace = true ui_input.workspace = true url.workspace = true -urlencoding.workspace = true util.workspace = true +uuid.workspace = true watch.workspace = true workspace.workspace = true zed_actions.workspace = true +image.workspace = true +async-fs.workspace = true +reqwest_client = { workspace = true, optional = true } [dev-dependencies] 
acp_thread = { workspace = true, features = ["test-support"] } agent = { workspace = true, features = ["test-support"] } assistant_text_thread = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } +clock.workspace = true db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } +eval_utils.workspace = true gpui = { workspace = true, "features" = ["test-support"] } indoc.workspace = true language = { workspace = true, "features" = ["test-support"] } @@ -113,6 +120,7 @@ languages = { workspace = true, features = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -rand.workspace = true +semver.workspace = true +reqwest_client.workspace = true tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/agent_ui/src/acp.rs b/crates/agent_ui/src/acp.rs index 2e15cd424d6313d981ff8c000f5eeb958aec9370..7a740c2dc4b9fbc769aa847347a0aa56d5f51934 100644 --- a/crates/agent_ui/src/acp.rs +++ b/crates/agent_ui/src/acp.rs @@ -1,4 +1,3 @@ -mod completion_provider; mod entry_view_state; mod message_editor; mod mode_selector; diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs index 60f39e47dceb6daebc72bba7e3e4a5fc70676dd6..feae74a86bc241c5d2e01f0941eafc60210f1bf6 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -22,7 +22,7 @@ use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; pub struct EntryViewState { workspace: WeakEntity, - project: Entity, + project: WeakEntity, history_store: Entity, prompt_store: Option>, entries: Vec, @@ -34,7 +34,7 @@ pub struct EntryViewState { impl EntryViewState { pub fn new( workspace: WeakEntity, - project: Entity, + project: WeakEntity, history_store: Entity, prompt_store: Option>, prompt_capabilities: Rc>, @@ -328,7 +328,7 @@ impl Entry { fn create_terminal( workspace: WeakEntity, - project: Entity, + project: WeakEntity, terminal: Entity, window: &mut Window, cx: &mut App, @@ -336,9 +336,9 @@ fn create_terminal( cx.new(|cx| { let mut view = TerminalView::new( terminal.read(cx).inner().clone(), - workspace.clone(), + workspace, None, - project.downgrade(), + project, window, cx, ); @@ -405,7 +405,7 @@ mod tests { use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; use editor::RowInfo; use fs::FakeFs; - use gpui::{AppContext as _, SemanticVersion, TestAppContext}; + use gpui::{AppContext as _, TestAppContext}; use crate::acp::entry_view_state::EntryViewState; use multi_buffer::MultiBufferRow; @@ -432,24 +432,11 @@ mod tests { let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let tool_call = acp::ToolCall { - id: acp::ToolCallId("tool".into()), - title: "Tool call".into(), - kind: acp::ToolKind::Other, - status: acp::ToolCallStatus::InProgress, - content: vec![acp::ToolCallContent::Diff { - diff: acp::Diff { - path: "/project/hello.txt".into(), - old_text: Some("hi world".into()), - new_text: "hello world".into(), - meta: None, - }, - }], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - }; + let tool_call = acp::ToolCall::new("tool", "Tool call") + .status(acp::ToolCallStatus::InProgress) + .content(vec![acp::ToolCallContent::Diff( + acp::Diff::new("/project/hello.txt", "hello world").old_text("hi 
world"), + )]); let connection = Rc::new(StubAgentConnection::new()); let thread = cx .update(|_, cx| { @@ -471,7 +458,7 @@ mod tests { let view_state = cx.new(|_cx| { EntryViewState::new( workspace.downgrade(), - project.clone(), + project.downgrade(), history_store, None, Default::default(), @@ -539,7 +526,7 @@ mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); } } diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 4f919a6c0425e48575d09380339730d7ddb26172..308230a24c6d2ba7fb0c3995b886e9e924d8e1b7 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -1,66 +1,45 @@ use crate::{ ChatWithFollow, - acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion}, - context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content}, + completion_provider::{ + PromptCompletionProvider, PromptCompletionProviderDelegate, PromptContextAction, + PromptContextType, SlashCommandCompletion, + }, + mention_set::{ + Mention, MentionImage, MentionSet, insert_crease_for_mention, paste_images_as_context, + }, }; -use acp_thread::{MentionUri, selection_name}; -use agent::{HistoryStore, outline}; +use acp_thread::MentionUri; +use agent::HistoryStore; use agent_client_protocol as acp; -use agent_servers::{AgentServer, AgentServerDelegate}; use anyhow::{Result, anyhow}; -use assistant_slash_commands::codeblock_fence_for_path; -use collections::{HashMap, HashSet}; +use collections::HashSet; use editor::{ - Addon, Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, - EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay, - MultiBuffer, ToOffset, - actions::Paste, - display_map::{Crease, CreaseId, FoldId}, + Addon, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, + EditorEvent, EditorMode, EditorStyle, Inlay, MultiBuffer, MultiBufferOffset, + MultiBufferSnapshot, ToOffset, actions::Paste, code_context_menus::CodeContextMenu, scroll::Autoscroll, }; -use futures::{ - FutureExt as _, - future::{Shared, join_all}, -}; +use futures::{FutureExt as _, future::join_all}; use gpui::{ - Animation, AnimationExt as _, AppContext, ClipboardEntry, Context, Entity, EntityId, - EventEmitter, FocusHandle, Focusable, Image, ImageFormat, Img, KeyContext, SharedString, - Subscription, Task, TextStyle, WeakEntity, pulsating_between, + AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageFormat, + KeyContext, SharedString, Subscription, Task, TextStyle, WeakEntity, }; use language::{Buffer, Language, language_settings::InlayHintKind}; -use language_model::LanguageModelImage; -use postage::stream::Stream as _; -use project::{ - CompletionIntent, InlayHint, InlayHintLabel, InlayId, Project, ProjectItem, ProjectPath, - Worktree, -}; -use prompt_store::{PromptId, PromptStore}; +use project::{CompletionIntent, InlayHint, InlayHintLabel, InlayId, Project, Worktree}; +use prompt_store::PromptStore; use rope::Point; use settings::Settings; -use std::{ - cell::RefCell, - ffi::OsStr, - fmt::Write, - ops::{Range, RangeInclusive}, - path::{Path, PathBuf}, - rc::Rc, - sync::Arc, - time::Duration, -}; -use text::OffsetRangeExt; +use std::{cell::RefCell, fmt::Write, rc::Rc, 
sync::Arc}; use theme::ThemeSettings; -use ui::{ButtonLike, TintColor, Toggleable, prelude::*}; -use util::{ResultExt, debug_panic, rel_path::RelPath}; -use workspace::{CollaboratorId, Workspace, notifications::NotifyResultExt as _}; +use ui::prelude::*; +use util::{ResultExt, debug_panic}; +use workspace::{CollaboratorId, Workspace}; use zed_actions::agent::Chat; pub struct MessageEditor { - mention_set: MentionSet, + mention_set: Entity, editor: Entity, - project: Entity, workspace: WeakEntity, - history_store: Entity, - prompt_store: Option>, prompt_capabilities: Rc>, available_commands: Rc>>, agent_name: SharedString, @@ -80,10 +59,45 @@ impl EventEmitter for MessageEditor {} const COMMAND_HINT_INLAY_ID: InlayId = InlayId::Hint(0); +impl PromptCompletionProviderDelegate for Entity { + fn supports_images(&self, cx: &App) -> bool { + self.read(cx).prompt_capabilities.borrow().image + } + + fn supported_modes(&self, cx: &App) -> Vec { + let mut supported = vec![PromptContextType::File, PromptContextType::Symbol]; + if self.read(cx).prompt_capabilities.borrow().embedded_context { + supported.extend(&[ + PromptContextType::Thread, + PromptContextType::Fetch, + PromptContextType::Rules, + ]); + } + supported + } + + fn available_commands(&self, cx: &App) -> Vec { + self.read(cx) + .available_commands + .borrow() + .iter() + .map(|cmd| crate::completion_provider::AvailableCommand { + name: cmd.name.clone().into(), + description: cmd.description.clone().into(), + requires_argument: cmd.input.is_some(), + }) + .collect() + } + + fn confirm_command(&self, cx: &mut App) { + self.update(cx, |this, cx| this.send(cx)); + } +} + impl MessageEditor { pub fn new( workspace: WeakEntity, - project: Entity, + project: WeakEntity, history_store: Entity, prompt_store: Option>, prompt_capabilities: Rc>, @@ -101,15 +115,7 @@ impl MessageEditor { }, None, ); - let completion_provider = Rc::new(ContextPickerCompletionProvider::new( - cx.weak_entity(), - workspace.clone(), - history_store.clone(), - prompt_store.clone(), - prompt_capabilities.clone(), - available_commands.clone(), - )); - let mention_set = MentionSet::default(); + let editor = cx.new(|cx| { let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); @@ -117,9 +123,9 @@ impl MessageEditor { let mut editor = Editor::new(mode, buffer, None, window, cx); editor.set_placeholder_text(placeholder, window, cx); editor.set_show_indent_guides(false, cx); + editor.set_show_completions_on_input(Some(true)); editor.set_soft_wrap(); editor.set_use_modal_editing(true); - editor.set_completion_provider(Some(completion_provider.clone())); editor.set_context_menu_options(ContextMenuOptions { min_entries_visible: 12, max_entries_visible: 12, @@ -128,6 +134,19 @@ impl MessageEditor { editor.register_addon(MessageEditorAddon::new()); editor }); + let mention_set = + cx.new(|_cx| MentionSet::new(project, history_store.clone(), prompt_store.clone())); + let completion_provider = Rc::new(PromptCompletionProvider::new( + cx.entity(), + editor.downgrade(), + mention_set.clone(), + history_store.clone(), + prompt_store.clone(), + workspace.clone(), + )); + editor.update(cx, |editor, _cx| { + editor.set_completion_provider(Some(completion_provider.clone())) + }); cx.on_focus_in(&editor.focus_handle(cx), window, |_, _, cx| { cx.emit(MessageEditorEvent::Focus) @@ -146,9 +165,13 @@ impl MessageEditor { if let EditorEvent::Edited { .. 
} = event && !editor.read(cx).read_only(cx) { - let snapshot = editor.update(cx, |editor, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + this.mention_set + .update(cx, |mention_set, _cx| mention_set.remove_invalid(&snapshot)); + let new_hints = this - .command_hint(editor.buffer(), cx) + .command_hint(snapshot.buffer()) .into_iter() .collect::>(); let has_new_hint = !new_hints.is_empty(); @@ -162,11 +185,7 @@ impl MessageEditor { cx, ); has_hint = has_new_hint; - - editor.snapshot(window, cx) }); - this.mention_set.remove_invalid(snapshot); - cx.notify(); } } @@ -174,11 +193,8 @@ impl MessageEditor { Self { editor, - project, mention_set, workspace, - history_store, - prompt_store, prompt_capabilities, available_commands, agent_name, @@ -187,13 +203,12 @@ impl MessageEditor { } } - fn command_hint(&self, buffer: &Entity, cx: &App) -> Option { + fn command_hint(&self, snapshot: &MultiBufferSnapshot) -> Option { let available_commands = self.available_commands.borrow(); if available_commands.is_empty() { return None; } - let snapshot = buffer.read(cx).snapshot(cx); let parsed_command = SlashCommandCompletion::try_parse(&snapshot.text(), 0)?; if parsed_command.argument.is_some() { return None; @@ -204,10 +219,15 @@ impl MessageEditor { .iter() .find(|command| command.name == command_name)?; - let acp::AvailableCommandInput::Unstructured { mut hint } = - available_command.input.clone()?; + let acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput { + mut hint, + .. + }) = available_command.input.clone()? + else { + return None; + }; - let mut hint_pos = parsed_command.source_range.end + 1; + let mut hint_pos = MultiBufferOffset(parsed_command.source_range.end) + 1usize; if hint_pos > snapshot.len() { hint_pos = snapshot.len(); hint.insert(0, ' '); @@ -236,6 +256,9 @@ impl MessageEditor { window: &mut Window, cx: &mut Context, ) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; let uri = MentionUri::Thread { id: thread.id.clone(), name: thread.title.to_string(), @@ -254,7 +277,22 @@ impl MessageEditor { .text_anchor }); - self.confirm_mention_completion(thread.title, start, content_len, uri, window, cx) + let supports_images = self.prompt_capabilities.borrow().image; + + self.mention_set + .update(cx, |mention_set, cx| { + mention_set.confirm_mention_completion( + thread.title, + start, + content_len, + uri, + supports_images, + self.editor.clone(), + &workspace, + window, + cx, + ) + }) .detach(); } @@ -263,397 +301,22 @@ impl MessageEditor { &self.editor } - #[cfg(test)] - pub(crate) fn mention_set(&mut self) -> &mut MentionSet { - &mut self.mention_set - } - pub fn is_empty(&self, cx: &App) -> bool { self.editor.read(cx).is_empty(cx) } - pub fn mentions(&self) -> HashSet { - self.mention_set - .mentions - .values() - .map(|(uri, _)| uri.clone()) - .collect() - } - - pub fn confirm_mention_completion( - &mut self, - crease_text: SharedString, - start: text::Anchor, - content_len: usize, - mention_uri: MentionUri, - window: &mut Window, - cx: &mut Context, - ) -> Task<()> { - let snapshot = self - .editor - .update(cx, |editor, cx| editor.snapshot(window, cx)); - let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else { - return Task::ready(()); - }; - let excerpt_id = start_anchor.excerpt_id; - let end_anchor = snapshot - .buffer_snapshot() - .anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1); - - let crease = if let MentionUri::File { abs_path } = 
&mention_uri - && let Some(extension) = abs_path.extension() - && let Some(extension) = extension.to_str() - && Img::extensions().contains(&extension) - && !extension.contains("svg") - { - let Some(project_path) = self - .project - .read(cx) - .project_path_for_absolute_path(&abs_path, cx) - else { - log::error!("project path not found"); - return Task::ready(()); - }; - let image = self - .project - .update(cx, |project, cx| project.open_image(project_path, cx)); - let image = cx - .spawn(async move |_, cx| { - let image = image.await.map_err(|e| e.to_string())?; - let image = image - .update(cx, |image, _| image.image.clone()) - .map_err(|e| e.to_string())?; - Ok(image) - }) - .shared(); - insert_crease_for_mention( - excerpt_id, - start, - content_len, - mention_uri.name().into(), - IconName::Image.path().into(), - Some(image), - self.editor.clone(), - window, - cx, - ) - } else { - insert_crease_for_mention( - excerpt_id, - start, - content_len, - crease_text, - mention_uri.icon_path(cx), - None, - self.editor.clone(), - window, - cx, - ) - }; - let Some((crease_id, tx)) = crease else { - return Task::ready(()); - }; - - let task = match mention_uri.clone() { - MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx), - MentionUri::Directory { .. } => Task::ready(Ok(Mention::Link)), - MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx), - MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx), - MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx), - MentionUri::Symbol { - abs_path, - line_range, - .. - } => self.confirm_mention_for_symbol(abs_path, line_range, cx), - MentionUri::Rule { id, .. } => self.confirm_mention_for_rule(id, cx), - MentionUri::PastedImage => { - debug_panic!("pasted image URI should not be included in completions"); - Task::ready(Err(anyhow!( - "pasted imaged URI should not be included in completions" - ))) - } - MentionUri::Selection { .. 
} => { - debug_panic!("unexpected selection URI"); - Task::ready(Err(anyhow!("unexpected selection URI"))) - } - }; - let task = cx - .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) - .shared(); - self.mention_set - .mentions - .insert(crease_id, (mention_uri, task.clone())); - - // Notify the user if we failed to load the mentioned context - cx.spawn_in(window, async move |this, cx| { - let result = task.await.notify_async_err(cx); - drop(tx); - if result.is_none() { - this.update(cx, |this, cx| { - this.editor.update(cx, |editor, cx| { - // Remove mention - editor.edit([(start_anchor..end_anchor, "")], cx); - }); - this.mention_set.mentions.remove(&crease_id); - }) - .ok(); - } - }) - } - - fn confirm_mention_for_file( - &mut self, - abs_path: PathBuf, - cx: &mut Context, - ) -> Task> { - let Some(project_path) = self - .project + pub fn is_completions_menu_visible(&self, cx: &App) -> bool { + self.editor .read(cx) - .project_path_for_absolute_path(&abs_path, cx) - else { - return Task::ready(Err(anyhow!("project path not found"))); - }; - let extension = abs_path - .extension() - .and_then(OsStr::to_str) - .unwrap_or_default(); - - if Img::extensions().contains(&extension) && !extension.contains("svg") { - if !self.prompt_capabilities.borrow().image { - return Task::ready(Err(anyhow!("This model does not support images yet"))); - } - let task = self - .project - .update(cx, |project, cx| project.open_image(project_path, cx)); - return cx.spawn(async move |_, cx| { - let image = task.await?; - let image = image.update(cx, |image, _| image.image.clone())?; - let format = image.format; - let image = cx - .update(|cx| LanguageModelImage::from_image(image, cx))? - .await; - if let Some(image) = image { - Ok(Mention::Image(MentionImage { - data: image.source, - format, - })) - } else { - Err(anyhow!("Failed to convert image")) - } - }); - } - - let buffer = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); - cx.spawn(async move |_, cx| { - let buffer = buffer.await?; - let buffer_content = outline::get_buffer_content_or_outline( - buffer.clone(), - Some(&abs_path.to_string_lossy()), - &cx, - ) - .await?; - - Ok(Mention::Text { - content: buffer_content.text, - tracked_buffers: vec![buffer], - }) - }) - } - - fn confirm_mention_for_fetch( - &mut self, - url: url::Url, - cx: &mut Context, - ) -> Task> { - let http_client = match self - .workspace - .update(cx, |workspace, _| workspace.client().http_client()) - { - Ok(http_client) => http_client, - Err(e) => return Task::ready(Err(e)), - }; - cx.background_executor().spawn(async move { - let content = fetch_url_content(http_client, url.to_string()).await?; - Ok(Mention::Text { - content, - tracked_buffers: Vec::new(), - }) - }) - } - - fn confirm_mention_for_symbol( - &mut self, - abs_path: PathBuf, - line_range: RangeInclusive, - cx: &mut Context, - ) -> Task> { - let Some(project_path) = self - .project - .read(cx) - .project_path_for_absolute_path(&abs_path, cx) - else { - return Task::ready(Err(anyhow!("project path not found"))); - }; - let buffer = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); - cx.spawn(async move |_, cx| { - let buffer = buffer.await?; - let mention = buffer.update(cx, |buffer, cx| { - let start = Point::new(*line_range.start(), 0).min(buffer.max_point()); - let end = Point::new(*line_range.end() + 1, 0).min(buffer.max_point()); - let content = buffer.text_for_range(start..end).collect(); - Mention::Text { - content, - 
tracked_buffers: vec![cx.entity()], - } - })?; - anyhow::Ok(mention) - }) - } - - fn confirm_mention_for_rule( - &mut self, - id: PromptId, - cx: &mut Context, - ) -> Task> { - let Some(prompt_store) = self.prompt_store.clone() else { - return Task::ready(Err(anyhow!("missing prompt store"))); - }; - let prompt = prompt_store.read(cx).load(id, cx); - cx.spawn(async move |_, _| { - let prompt = prompt.await?; - Ok(Mention::Text { - content: prompt, - tracked_buffers: Vec::new(), - }) - }) + .context_menu() + .borrow() + .as_ref() + .is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()) } - pub fn confirm_mention_for_selection( - &mut self, - source_range: Range, - selections: Vec<(Entity, Range, Range)>, - window: &mut Window, - cx: &mut Context, - ) { - let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); - let Some(start) = snapshot.as_singleton_anchor(source_range.start) else { - return; - }; - - let offset = start.to_offset(&snapshot); - - for (buffer, selection_range, range_to_fold) in selections { - let range = snapshot.anchor_after(offset + range_to_fold.start) - ..snapshot.anchor_after(offset + range_to_fold.end); - - let abs_path = buffer - .read(cx) - .project_path(cx) - .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx)); - let snapshot = buffer.read(cx).snapshot(); - - let text = snapshot - .text_for_range(selection_range.clone()) - .collect::(); - let point_range = selection_range.to_point(&snapshot); - let line_range = point_range.start.row..=point_range.end.row; - - let uri = MentionUri::Selection { - abs_path: abs_path.clone(), - line_range: line_range.clone(), - }; - let crease = crate::context_picker::crease_for_mention( - selection_name(abs_path.as_deref(), &line_range).into(), - uri.icon_path(cx), - range, - self.editor.downgrade(), - ); - - let crease_id = self.editor.update(cx, |editor, cx| { - let crease_ids = editor.insert_creases(vec![crease.clone()], cx); - editor.fold_creases(vec![crease], false, window, cx); - crease_ids.first().copied().unwrap() - }); - - self.mention_set.mentions.insert( - crease_id, - ( - uri, - Task::ready(Ok(Mention::Text { - content: text, - tracked_buffers: vec![buffer], - })) - .shared(), - ), - ); - } - - // Take this explanation with a grain of salt but, with creases being - // inserted, GPUI's recomputes the editor layout in the next frames, so - // directly calling `editor.request_autoscroll` wouldn't work as - // expected. We're leveraging `cx.on_next_frame` to wait 2 frames and - // ensure that the layout has been recalculated so that the autoscroll - // request actually shows the cursor's new position. - let editor = self.editor.clone(); - cx.on_next_frame(window, move |_, window, cx| { - cx.on_next_frame(window, move |_, _, cx| { - editor.update(cx, |editor, cx| { - editor.request_autoscroll(Autoscroll::fit(), cx) - }); - }); - }); - } - - fn confirm_mention_for_thread( - &mut self, - id: acp::SessionId, - cx: &mut Context, - ) -> Task> { - let server = Rc::new(agent::NativeAgentServer::new( - self.project.read(cx).fs().clone(), - self.history_store.clone(), - )); - let delegate = AgentServerDelegate::new( - self.project.read(cx).agent_server_store().clone(), - self.project.clone(), - None, - None, - ); - let connection = server.connect(None, delegate, cx); - cx.spawn(async move |_, cx| { - let (agent, _) = connection.await?; - let agent = agent.downcast::().unwrap(); - let summary = agent - .0 - .update(cx, |agent, cx| agent.thread_summary(id, cx))? 
- .await?; - anyhow::Ok(Mention::Text { - content: summary.to_string(), - tracked_buffers: Vec::new(), - }) - }) - } - - fn confirm_mention_for_text_thread( - &mut self, - path: PathBuf, - cx: &mut Context, - ) -> Task> { - let text_thread_task = self.history_store.update(cx, |store, cx| { - store.load_text_thread(path.as_path().into(), cx) - }); - cx.spawn(async move |_, cx| { - let text_thread = text_thread_task.await?; - let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx))?; - Ok(Mention::Text { - content: xml, - tracked_buffers: Vec::new(), - }) - }) + #[cfg(test)] + pub fn mention_set(&self) -> &Entity { + &self.mention_set } fn validate_slash_commands( @@ -705,7 +368,7 @@ impl MessageEditor { let contents = self .mention_set - .contents(full_mention_content, self.project.clone(), cx); + .update(cx, |store, cx| store.contents(full_mention_content, cx)); let editor = self.editor.clone(); let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context; @@ -728,8 +391,8 @@ impl MessageEditor { }; let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot()); - if crease_range.start > ix { - let chunk = text[ix..crease_range.start].into(); + if crease_range.start.0 > ix { + let chunk = text[ix..crease_range.start.0].into(); chunks.push(chunk); } let chunk = match mention { @@ -739,34 +402,27 @@ impl MessageEditor { } => { all_tracked_buffers.extend(tracked_buffers.iter().cloned()); if supports_embedded_context { - acp::ContentBlock::Resource(acp::EmbeddedResource { - annotations: None, - resource: - acp::EmbeddedResourceResource::TextResourceContents( - acp::TextResourceContents { - mime_type: None, - text: content.clone(), - uri: uri.to_uri().to_string(), - meta: None, - }, + acp::ContentBlock::Resource(acp::EmbeddedResource::new( + acp::EmbeddedResourceResource::TextResourceContents( + acp::TextResourceContents::new( + content.clone(), + uri.to_uri().to_string(), ), - meta: None, - }) + ), + )) } else { - acp::ContentBlock::ResourceLink(acp::ResourceLink { - name: uri.name(), - uri: uri.to_uri().to_string(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }) + acp::ContentBlock::ResourceLink(acp::ResourceLink::new( + uri.name(), + uri.to_uri().to_string(), + )) } } - Mention::Image(mention_image) => { - let uri = match uri { + Mention::Image(mention_image) => acp::ContentBlock::Image( + acp::ImageContent::new( + mention_image.data.clone(), + mention_image.format.mime_type(), + ) + .uri(match uri { MentionUri::File { .. 
} => Some(uri.to_uri().to_string()), MentionUri::PastedImage => None, other => { @@ -776,28 +432,14 @@ impl MessageEditor { ); None } - }; - acp::ContentBlock::Image(acp::ImageContent { - annotations: None, - data: mention_image.data.to_string(), - mime_type: mention_image.format.mime_type().into(), - uri, - meta: None, - }) - } - Mention::Link => acp::ContentBlock::ResourceLink(acp::ResourceLink { - name: uri.name(), - uri: uri.to_uri().to_string(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }), + }), + ), + Mention::Link => acp::ContentBlock::ResourceLink( + acp::ResourceLink::new(uri.name(), uri.to_uri().to_string()), + ), }; chunks.push(chunk); - ix = crease_range.end; + ix = crease_range.end.0; } if ix < text.len() { @@ -817,10 +459,12 @@ impl MessageEditor { self.editor.update(cx, |editor, cx| { editor.clear(window, cx); editor.remove_creases( - self.mention_set - .mentions - .drain() - .map(|(crease_id, _)| crease_id), + self.mention_set.update(cx, |mention_set, _cx| { + mention_set + .clear() + .map(|(crease_id, _)| crease_id) + .collect::>() + }), cx, ) }); @@ -836,6 +480,45 @@ impl MessageEditor { cx.emit(MessageEditorEvent::Send) } + pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context) { + let editor = self.editor.clone(); + + cx.spawn_in(window, async move |_, cx| { + editor + .update_in(cx, |editor, window, cx| { + let menu_is_open = + editor.context_menu().borrow().as_ref().is_some_and(|menu| { + matches!(menu, CodeContextMenu::Completions(_)) && menu.visible() + }); + + let has_at_sign = { + let snapshot = editor.display_snapshot(cx); + let cursor = editor.selections.newest::(&snapshot).head(); + let offset = cursor.to_offset(&snapshot); + if offset.0 > 0 { + snapshot + .buffer_snapshot() + .reversed_chars_at(offset) + .next() + .map(|sign| sign == '@') + .unwrap_or(false) + } else { + false + } + }; + + if menu_is_open && has_at_sign { + return; + } + + editor.insert("@", window, cx); + editor.show_completions(&editor::actions::ShowCompletions, window, cx); + }) + .log_err(); + }) + .detach(); + } + fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context) { self.send(cx); } @@ -860,111 +543,150 @@ impl MessageEditor { } fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { - if !self.prompt_capabilities.borrow().image { - return; - } - - let images = cx + let editor_clipboard_selections = cx .read_from_clipboard() - .map(|item| { - item.into_entries() - .filter_map(|entry| { - if let ClipboardEntry::Image(image) = entry { - Some(image) - } else { - None - } - }) - .collect::>() - }) - .unwrap_or_default(); - - if images.is_empty() { - return; - } - cx.stop_propagation(); - - let replacement_text = MentionUri::PastedImage.as_link().to_string(); - for image in images { - let (excerpt_id, text_anchor, multibuffer_anchor) = - self.editor.update(cx, |message_editor, cx| { - let snapshot = message_editor.snapshot(window, cx); - let (excerpt_id, _, buffer_snapshot) = - snapshot.buffer_snapshot().as_singleton().unwrap(); - - let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len()); - let multibuffer_anchor = snapshot - .buffer_snapshot() - .anchor_in_excerpt(*excerpt_id, text_anchor); - message_editor.edit( - [( - multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), - format!("{replacement_text} "), - )], - cx, - ); - (*excerpt_id, text_anchor, multibuffer_anchor) - }); + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match 
entry { + ClipboardEntry::String(text) => { + text.metadata_json::>() + } + _ => None, + }); - let content_len = replacement_text.len(); - let Some(start_anchor) = multibuffer_anchor else { - continue; - }; - let end_anchor = self.editor.update(cx, |editor, cx| { - let snapshot = editor.buffer().read(cx).snapshot(cx); - snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len) + let has_file_context = editor_clipboard_selections + .as_ref() + .is_some_and(|selections| { + selections + .iter() + .any(|sel| sel.file_path.is_some() && sel.line_range.is_some()) }); - let image = Arc::new(image); - let Some((crease_id, tx)) = insert_crease_for_mention( - excerpt_id, - text_anchor, - content_len, - MentionUri::PastedImage.name().into(), - IconName::Image.path().into(), - Some(Task::ready(Ok(image.clone())).shared()), - self.editor.clone(), - window, - cx, - ) else { - continue; - }; - let task = cx - .spawn_in(window, { - async move |_, cx| { - let format = image.format; - let image = cx - .update(|_, cx| LanguageModelImage::from_image(image, cx)) - .map_err(|e| e.to_string())? - .await; - drop(tx); - if let Some(image) = image { - Ok(Mention::Image(MentionImage { - data: image.source, - format, - })) - } else { - Err("Failed to convert image".into()) - } + + if has_file_context { + if let Some((workspace, selections)) = + self.workspace.upgrade().zip(editor_clipboard_selections) + { + let Some(first_selection) = selections.first() else { + return; + }; + if let Some(file_path) = &first_selection.file_path { + // In case someone pastes selections from another window + // with a different project, we don't want to insert the + // crease (containing the absolute path) since the agent + // cannot access files outside the project. + let is_in_project = workspace + .read(cx) + .project() + .read(cx) + .project_path_for_absolute_path(file_path, cx) + .is_some(); + if !is_in_project { + return; } - }) - .shared(); + } + + cx.stop_propagation(); + let insertion_target = self + .editor + .read(cx) + .selections + .newest_anchor() + .start + .text_anchor; + + let project = workspace.read(cx).project().clone(); + for selection in selections { + if let (Some(file_path), Some(line_range)) = + (selection.file_path, selection.line_range) + { + let crease_text = + acp_thread::selection_name(Some(file_path.as_ref()), &line_range); + + let mention_uri = MentionUri::Selection { + abs_path: Some(file_path.clone()), + line_range: line_range.clone(), + }; + + let mention_text = mention_uri.as_link().to_string(); + let (excerpt_id, text_anchor, content_len) = + self.editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx); + let snapshot = buffer.snapshot(cx); + let (excerpt_id, _, buffer_snapshot) = + snapshot.as_singleton().unwrap(); + let text_anchor = insertion_target.bias_left(&buffer_snapshot); + + editor.insert(&mention_text, window, cx); + editor.insert(" ", window, cx); + + (*excerpt_id, text_anchor, mention_text.len()) + }); + + let Some((crease_id, tx)) = insert_crease_for_mention( + excerpt_id, + text_anchor, + content_len, + crease_text.into(), + mention_uri.icon_path(cx), + None, + self.editor.clone(), + window, + cx, + ) else { + continue; + }; + drop(tx); - self.mention_set - .mentions - .insert(crease_id, (MentionUri::PastedImage, task.clone())); + let mention_task = cx + .spawn({ + let project = project.clone(); + async move |_, cx| { + let project_path = project + .update(cx, |project, cx| { + project.project_path_for_absolute_path(&file_path, cx) + }) + .map_err(|e| 
e.to_string())? + .ok_or_else(|| "project path not found".to_string())?; + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer(project_path, cx) + }) + .map_err(|e| e.to_string())? + .await + .map_err(|e| e.to_string())?; + + buffer + .update(cx, |buffer, cx| { + let start = Point::new(*line_range.start(), 0) + .min(buffer.max_point()); + let end = Point::new(*line_range.end() + 1, 0) + .min(buffer.max_point()); + let content = + buffer.text_for_range(start..end).collect(); + Mention::Text { + content, + tracked_buffers: vec![cx.entity()], + } + }) + .map_err(|e| e.to_string()) + } + }) + .shared(); - cx.spawn_in(window, async move |this, cx| { - if task.await.notify_async_err(cx).is_none() { - this.update(cx, |this, cx| { - this.editor.update(cx, |editor, cx| { - editor.edit([(start_anchor..end_anchor, "")], cx); + self.mention_set.update(cx, |mention_set, _cx| { + mention_set.insert_mention(crease_id, mention_uri.clone(), mention_task) }); - this.mention_set.mentions.remove(&crease_id); - }) - .ok(); + } } - }) - .detach(); + return; + } + } + + if self.prompt_capabilities.borrow().image + && let Some(task) = + paste_images_as_context(self.editor.clone(), self.mention_set.clone(), window, cx) + { + task.detach(); } } @@ -975,26 +697,29 @@ impl MessageEditor { window: &mut Window, cx: &mut Context, ) { - let path_style = self.project.read(cx).path_style(cx); + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let project = workspace.read(cx).project().clone(); + let path_style = project.read(cx).path_style(cx); let buffer = self.editor.read(cx).buffer().clone(); let Some(buffer) = buffer.read(cx).as_singleton() else { return; }; let mut tasks = Vec::new(); for path in paths { - let Some(entry) = self.project.read(cx).entry_for_path(&path, cx) else { + let Some(entry) = project.read(cx).entry_for_path(&path, cx) else { continue; }; - let Some(worktree) = self.project.read(cx).worktree_for_id(path.worktree_id, cx) else { + let Some(worktree) = project.read(cx).worktree_for_id(path.worktree_id, cx) else { continue; }; let abs_path = worktree.read(cx).absolutize(&path.path); - let (file_name, _) = - crate::context_picker::file_context_picker::extract_file_name_and_directory( - &path.path, - worktree.read(cx).root_name(), - path_style, - ); + let (file_name, _) = crate::completion_provider::extract_file_name_and_directory( + &path.path, + worktree.read(cx).root_name(), + path_style, + ); let uri = if entry.is_dir() { MentionUri::Directory { abs_path } @@ -1016,14 +741,20 @@ impl MessageEditor { cx, ); }); - tasks.push(self.confirm_mention_completion( - file_name, - anchor, - content_len, - uri, - window, - cx, - )); + let supports_images = self.prompt_capabilities.borrow().image; + tasks.push(self.mention_set.update(cx, |mention_set, cx| { + mention_set.confirm_mention_completion( + file_name, + anchor, + content_len, + uri, + supports_images, + self.editor.clone(), + &workspace, + window, + cx, + ) + })); } cx.spawn(async move |_, _| { join_all(tasks).await; @@ -1041,18 +772,21 @@ impl MessageEditor { let cursor_anchor = editor.selections.newest_anchor().head(); let cursor_offset = cursor_anchor.to_offset(&editor_buffer.snapshot(cx)); let anchor = buffer.update(cx, |buffer, _cx| { - buffer.anchor_before(cursor_offset.min(buffer.len())) + buffer.anchor_before(cursor_offset.0.min(buffer.len())) }); let Some(workspace) = self.workspace.upgrade() else { return; }; - let Some(completion) = ContextPickerCompletionProvider::completion_for_action( - 
ContextPickerAction::AddSelections, - anchor..anchor, - cx.weak_entity(), - &workspace, - cx, - ) else { + let Some(completion) = + PromptCompletionProvider::>::completion_for_action( + PromptContextAction::AddSelections, + anchor..anchor, + self.editor.downgrade(), + self.mention_set.downgrade(), + &workspace, + cx, + ) + else { return; }; @@ -1085,9 +819,13 @@ impl MessageEditor { window: &mut Window, cx: &mut Context, ) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + self.clear(window, cx); - let path_style = self.project.read(cx).path_style(cx); + let path_style = workspace.read(cx).project().read(cx).path_style(cx); let mut text = String::new(); let mut mentions = Vec::new(); @@ -1130,8 +868,7 @@ impl MessageEditor { uri, data, mime_type, - annotations: _, - meta: _, + .. }) => { let mention_uri = if let Some(uri) = uri { MentionUri::parse(&uri, path_style) @@ -1157,7 +894,7 @@ impl MessageEditor { }), )); } - acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => {} + _ => {} } } @@ -1167,7 +904,7 @@ impl MessageEditor { }); for (range, mention_uri, mention) in mentions { - let anchor = snapshot.anchor_before(range.start); + let anchor = snapshot.anchor_before(MultiBufferOffset(range.start)); let Some((crease_id, tx)) = insert_crease_for_mention( anchor.excerpt_id, anchor.text_anchor, @@ -1183,10 +920,13 @@ impl MessageEditor { }; drop(tx); - self.mention_set.mentions.insert( - crease_id, - (mention_uri.clone(), Task::ready(Ok(mention)).shared()), - ); + self.mention_set.update(cx, |mention_set, _cx| { + mention_set.insert_mention( + crease_id, + mention_uri.clone(), + Task::ready(Ok(mention)).shared(), + ) + }); } cx.notify(); } @@ -1214,111 +954,6 @@ impl MessageEditor { } } -fn full_mention_for_directory( - project: &Entity, - abs_path: &Path, - cx: &mut App, -) -> Task> { - fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc, String)> { - let mut files = Vec::new(); - - for entry in worktree.child_entries(path) { - if entry.is_dir() { - files.extend(collect_files_in_path(worktree, &entry.path)); - } else if entry.is_file() { - files.push(( - entry.path.clone(), - worktree - .full_path(&entry.path) - .to_string_lossy() - .to_string(), - )); - } - } - - files - } - - let Some(project_path) = project - .read(cx) - .project_path_for_absolute_path(&abs_path, cx) - else { - return Task::ready(Err(anyhow!("project path not found"))); - }; - let Some(entry) = project.read(cx).entry_for_path(&project_path, cx) else { - return Task::ready(Err(anyhow!("project entry not found"))); - }; - let directory_path = entry.path.clone(); - let worktree_id = project_path.worktree_id; - let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) else { - return Task::ready(Err(anyhow!("worktree not found"))); - }; - let project = project.clone(); - cx.spawn(async move |cx| { - let file_paths = worktree.read_with(cx, |worktree, _cx| { - collect_files_in_path(worktree, &directory_path) - })?; - let descendants_future = cx.update(|cx| { - join_all(file_paths.into_iter().map(|(worktree_path, full_path)| { - let rel_path = worktree_path - .strip_prefix(&directory_path) - .log_err() - .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into()); - - let open_task = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - let project_path = ProjectPath { - worktree_id, - path: worktree_path, - }; - buffer_store.open_buffer(project_path, cx) - }) - }); - - cx.spawn(async move |cx| { - let buffer = 
open_task.await.log_err()?; - let buffer_content = outline::get_buffer_content_or_outline( - buffer.clone(), - Some(&full_path), - &cx, - ) - .await - .ok()?; - - Some((rel_path, full_path, buffer_content.text, buffer)) - }) - })) - })?; - - let contents = cx - .background_spawn(async move { - let (contents, tracked_buffers) = descendants_future - .await - .into_iter() - .flatten() - .map(|(rel_path, full_path, rope, buffer)| { - ((rel_path, full_path, rope), buffer) - }) - .unzip(); - Mention::Text { - content: render_directory_contents(contents), - tracked_buffers, - } - }) - .await; - anyhow::Ok(contents) - }) -} - -fn render_directory_contents(entries: Vec<(Arc, String, String)>) -> String { - let mut output = String::new(); - for (_relative_path, full_path, content) in entries { - let fence = codeblock_fence_for_path(Some(&full_path), None); - write!(output, "\n{fence}\n{content}\n```").unwrap(); - } - output -} - impl Focusable for MessageEditor { fn focus_handle(&self, cx: &App) -> FocusHandle { self.editor.focus_handle(cx) @@ -1362,233 +997,6 @@ impl Render for MessageEditor { } } -pub(crate) fn insert_crease_for_mention( - excerpt_id: ExcerptId, - anchor: text::Anchor, - content_len: usize, - crease_label: SharedString, - crease_icon: SharedString, - // abs_path: Option>, - image: Option, String>>>>, - editor: Entity, - window: &mut Window, - cx: &mut App, -) -> Option<(CreaseId, postage::barrier::Sender)> { - let (tx, rx) = postage::barrier::channel(); - - let crease_id = editor.update(cx, |editor, cx| { - let snapshot = editor.buffer().read(cx).snapshot(cx); - - let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; - - let start = start.bias_right(&snapshot); - let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); - - let placeholder = FoldPlaceholder { - render: render_mention_fold_button( - crease_label, - crease_icon, - start..end, - rx, - image, - cx.weak_entity(), - cx, - ), - merge_adjacent: false, - ..Default::default() - }; - - let crease = Crease::Inline { - range: start..end, - placeholder, - render_toggle: None, - render_trailer: None, - metadata: None, - }; - - let ids = editor.insert_creases(vec![crease.clone()], cx); - editor.fold_creases(vec![crease], false, window, cx); - - Some(ids[0]) - })?; - - Some((crease_id, tx)) -} - -fn render_mention_fold_button( - label: SharedString, - icon: SharedString, - range: Range, - mut loading_finished: postage::barrier::Receiver, - image_task: Option, String>>>>, - editor: WeakEntity, - cx: &mut App, -) -> Arc, &mut App) -> AnyElement> { - let loading = cx.new(|cx| { - let loading = cx.spawn(async move |this, cx| { - loading_finished.recv().await; - this.update(cx, |this: &mut LoadingContext, cx| { - this.loading = None; - cx.notify(); - }) - .ok(); - }); - LoadingContext { - id: cx.entity_id(), - label, - icon, - range, - editor, - loading: Some(loading), - image: image_task.clone(), - } - }); - Arc::new(move |_fold_id, _fold_range, _cx| loading.clone().into_any_element()) -} - -struct LoadingContext { - id: EntityId, - label: SharedString, - icon: SharedString, - range: Range, - editor: WeakEntity, - loading: Option>, - image: Option, String>>>>, -} - -impl Render for LoadingContext { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let is_in_text_selection = self - .editor - .update(cx, |editor, cx| editor.is_range_selected(&self.range, cx)) - .unwrap_or_default(); - ButtonLike::new(("loading-context", self.id)) - .style(ButtonStyle::Filled) - 
.selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .toggle_state(is_in_text_selection) - .when_some(self.image.clone(), |el, image_task| { - el.hoverable_tooltip(move |_, cx| { - let image = image_task.peek().cloned().transpose().ok().flatten(); - let image_task = image_task.clone(); - cx.new::(|cx| ImageHover { - image, - _task: cx.spawn(async move |this, cx| { - if let Ok(image) = image_task.clone().await { - this.update(cx, |this, cx| { - if this.image.replace(image).is_none() { - cx.notify(); - } - }) - .ok(); - } - }), - }) - .into() - }) - }) - .child( - h_flex() - .gap_1() - .child( - Icon::from_path(self.icon.clone()) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(self.label.clone()) - .size(LabelSize::Small) - .buffer_font(cx) - .single_line(), - ) - .map(|el| { - if self.loading.is_some() { - el.with_animation( - "loading-context-crease", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.opacity(delta), - ) - .into_any() - } else { - el.into_any() - } - }), - ) - } -} - -struct ImageHover { - image: Option>, - _task: Task<()>, -} - -impl Render for ImageHover { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - if let Some(image) = self.image.clone() { - gpui::img(image).max_w_96().max_h_96().into_any_element() - } else { - gpui::Empty.into_any_element() - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum Mention { - Text { - content: String, - tracked_buffers: Vec>, - }, - Image(MentionImage), - Link, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct MentionImage { - pub data: SharedString, - pub format: ImageFormat, -} - -#[derive(Default)] -pub struct MentionSet { - mentions: HashMap>>)>, -} - -impl MentionSet { - fn contents( - &self, - full_mention_content: bool, - project: Entity, - cx: &mut App, - ) -> Task>> { - let mentions = self.mentions.clone(); - cx.spawn(async move |cx| { - let mut contents = HashMap::default(); - for (crease_id, (mention_uri, task)) in mentions { - let content = if full_mention_content - && let MentionUri::Directory { abs_path } = &mention_uri - { - cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))? - .await? - } else { - task.await.map_err(|e| anyhow!("{e}"))? 
- }; - - contents.insert(crease_id, (mention_uri, content)); - } - Ok(contents) - }) - } - - fn remove_invalid(&mut self, snapshot: EditorSnapshot) { - for (crease_id, crease) in snapshot.crease_snapshot.creases() { - if !crease.range().start.is_valid(&snapshot.buffer_snapshot()) { - self.mentions.remove(&crease_id); - } - } - } -} - pub struct MessageEditorAddon {} impl MessageEditorAddon { @@ -1622,7 +1030,7 @@ mod tests { use agent::{HistoryStore, outline}; use agent_client_protocol as acp; use assistant_text_thread::TextThreadStore; - use editor::{AnchorRangeExt as _, Editor, EditorMode}; + use editor::{AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset}; use fs::FakeFs; use futures::StreamExt as _; use gpui::{ @@ -1660,7 +1068,7 @@ mod tests { cx.new(|cx| { MessageEditor::new( workspace.downgrade(), - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), @@ -1771,7 +1179,7 @@ mod tests { cx.new(|cx| { MessageEditor::new( workspace_handle.clone(), - project.clone(), + project.downgrade(), history_store.clone(), None, prompt_capabilities.clone(), @@ -1805,12 +1213,7 @@ mod tests { assert!(error_message.contains("Available commands: none")); // Now simulate Claude providing its list of available commands (which doesn't include file) - available_commands.replace(vec![acp::AvailableCommand { - name: "help".to_string(), - description: "Get help".to_string(), - input: None, - meta: None, - }]); + available_commands.replace(vec![acp::AvailableCommand::new("help", "Get help")]); // Test that unsupported slash commands trigger an error when we have a list of available commands editor.update_in(cx, |editor, window, cx| { @@ -1924,20 +1327,12 @@ mod tests { let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![ - acp::AvailableCommand { - name: "quick-math".to_string(), - description: "2 + 2 = 4 - 1 = 3".to_string(), - input: None, - meta: None, - }, - acp::AvailableCommand { - name: "say-hello".to_string(), - description: "Say hello to whoever you want".to_string(), - input: Some(acp::AvailableCommandInput::Unstructured { - hint: "".to_string(), - }), - meta: None, - }, + acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"), + acp::AvailableCommand::new("say-hello", "Say hello to whoever you want").input( + acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput::new( + "", + )), + ), ])); let editor = workspace.update_in(&mut cx, |workspace, window, cx| { @@ -1945,7 +1340,7 @@ mod tests { let message_editor = cx.new(|cx| { MessageEditor::new( workspace_handle, - project.clone(), + project.downgrade(), history_store.clone(), None, prompt_capabilities.clone(), @@ -1970,7 +1365,7 @@ mod tests { cx, ); }); - message_editor.read(cx).focus_handle(cx).focus(window); + message_editor.read(cx).focus_handle(cx).focus(window, cx); message_editor.read(cx).editor().clone() }); @@ -2136,7 +1531,7 @@ mod tests { rel_path("b/eight.txt"), ]; - let slash = PathStyle::local().separator(); + let slash = PathStyle::local().primary_separator(); let mut opened_editors = Vec::new(); for path in paths { @@ -2167,7 +1562,7 @@ mod tests { let message_editor = cx.new(|cx| { MessageEditor::new( workspace_handle, - project.clone(), + project.downgrade(), history_store.clone(), None, prompt_capabilities.clone(), @@ -2192,7 +1587,7 @@ mod tests { cx, ); }); - 
message_editor.read(cx).focus_handle(cx).focus(window); + message_editor.read(cx).focus_handle(cx).focus(window, cx); let editor = message_editor.read(cx).editor().clone(); (message_editor, editor) }); @@ -2217,12 +1612,12 @@ mod tests { editor.set_text("", window, cx); }); - prompt_capabilities.replace(acp::PromptCapabilities { - image: true, - audio: true, - embedded_context: true, - meta: None, - }); + prompt_capabilities.replace( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ); cx.simulate_input("Lorem "); @@ -2300,7 +1695,7 @@ mod tests { .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(false, project.clone(), cx) + .update(cx, |mention_set, cx| mention_set.contents(false, cx)) }) .await .unwrap() @@ -2355,7 +1750,7 @@ mod tests { .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(false, project.clone(), cx) + .update(cx, |mention_set, cx| mention_set.contents(false, cx)) }) .await .unwrap() @@ -2478,7 +1873,7 @@ mod tests { .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(false, project.clone(), cx) + .update(cx, |mention_set, cx| mention_set.contents(false, cx)) }) .await .unwrap() @@ -2526,7 +1921,7 @@ mod tests { .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(false, project.clone(), cx) + .update(cx, |mention_set, cx| mention_set.contents(false, cx)) }) .await .expect_err("Should fail to load x.png"); @@ -2579,7 +1974,7 @@ mod tests { .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(false, project.clone(), cx) + .update(cx, |mention_set, cx| mention_set.contents(false, cx)) }) .await .unwrap(); @@ -2591,7 +1986,7 @@ mod tests { editor.display_map.update(cx, |display_map, cx| { display_map .snapshot(cx) - .folds_in_range(0..snapshot.len()) + .folds_in_range(MultiBufferOffset(0)..snapshot.len()) .map(|fold| fold.range.to_point(&snapshot)) .collect() }) @@ -2622,13 +2017,14 @@ mod tests { } #[gpui::test] - async fn test_large_file_mention_uses_outline(cx: &mut TestAppContext) { + async fn test_large_file_mention_fallback(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); // Create a large file that exceeds AUTO_OUTLINE_SIZE - const LINE: &str = "fn example_function() { /* some code */ }\n"; + // Using plain text without a configured language, so no outline is available + const LINE: &str = "This is a line of text in the file\n"; let large_content = LINE.repeat(2 * (outline::AUTO_OUTLINE_SIZE / LINE.len())); assert!(large_content.len() > outline::AUTO_OUTLINE_SIZE); @@ -2639,8 +2035,8 @@ mod tests { fs.insert_tree( "/project", json!({ - "large_file.rs": large_content.clone(), - "small_file.rs": small_content, + "large_file.txt": large_content.clone(), + "small_file.txt": small_content, }), ) .await; @@ -2657,7 +2053,7 @@ mod tests { cx.new(|cx| { let editor = MessageEditor::new( workspace.downgrade(), - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), @@ -2672,11 +2068,9 @@ mod tests { cx, ); // Enable embedded context so files are actually included - editor.prompt_capabilities.replace(acp::PromptCapabilities { - embedded_context: true, - meta: None, - ..Default::default() - }); + editor + .prompt_capabilities + .replace(acp::PromptCapabilities::new().embedded_context(true)); editor }) }); @@ -2686,20 +2080,31 @@ mod tests { let large_file_abs_path = project.read_with(cx, |project, cx| { let worktree = 
project.worktrees(cx).next().unwrap(); let worktree_root = worktree.read(cx).abs_path(); - worktree_root.join("large_file.rs") + worktree_root.join("large_file.txt") }); let large_file_task = message_editor.update(cx, |editor, cx| { - editor.confirm_mention_for_file(large_file_abs_path, cx) + editor.mention_set().update(cx, |set, cx| { + set.confirm_mention_for_file(large_file_abs_path, true, cx) + }) }); let large_file_mention = large_file_task.await.unwrap(); match large_file_mention { Mention::Text { content, .. } => { - // Should contain outline header for large files - assert!(content.contains("File outline for")); - assert!(content.contains("file too large to show full content")); - // Should not contain the full repeated content - assert!(!content.contains(&LINE.repeat(100))); + // Should contain some of the content but not all of it + assert!( + content.contains(LINE), + "Should contain some of the file content" + ); + assert!( + !content.contains(&LINE.repeat(100)), + "Should not contain the full file" + ); + // Should be much smaller than original + assert!( + content.len() < large_content.len() / 10, + "Should be significantly truncated" + ); } _ => panic!("Expected Text mention for large file"), } @@ -2709,19 +2114,19 @@ mod tests { let small_file_abs_path = project.read_with(cx, |project, cx| { let worktree = project.worktrees(cx).next().unwrap(); let worktree_root = worktree.read(cx).abs_path(); - worktree_root.join("small_file.rs") + worktree_root.join("small_file.txt") }); let small_file_task = message_editor.update(cx, |editor, cx| { - editor.confirm_mention_for_file(small_file_abs_path, cx) + editor.mention_set().update(cx, |set, cx| { + set.confirm_mention_for_file(small_file_abs_path, true, cx) + }) }); let small_file_mention = small_file_task.await.unwrap(); match small_file_mention { Mention::Text { content, .. 
} => { - // Should contain the actual content + // Should contain the full actual content assert_eq!(content, small_content); - // Should not contain outline header - assert!(!content.contains("File outline for")); } _ => panic!("Expected Text mention for small file"), } @@ -2744,7 +2149,7 @@ mod tests { // Create a thread metadata to insert as summary let thread_metadata = agent::DbThreadMetadata { - id: acp::SessionId("thread-123".into()), + id: acp::SessionId::new("thread-123"), title: "Previous Conversation".into(), updated_at: chrono::Utc::now(), }; @@ -2753,7 +2158,7 @@ mod tests { cx.new(|cx| { let mut editor = MessageEditor::new( workspace.downgrade(), - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), @@ -2789,7 +2194,7 @@ mod tests { text ); - let mentions = editor.mentions(); + let mentions = editor.mention_set().read(cx).mentions(); assert_eq!( mentions.len(), 1, @@ -2822,7 +2227,7 @@ mod tests { cx.new(|cx| { MessageEditor::new( workspace.downgrade(), - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), @@ -2851,14 +2256,7 @@ mod tests { .await .unwrap(); - assert_eq!( - content, - vec![acp::ContentBlock::Text(acp::TextContent { - text: "してhello world".into(), - annotations: None, - meta: None - })] - ); + assert_eq!(content, vec!["してhello world".into()]); } #[gpui::test] @@ -2892,7 +2290,7 @@ mod tests { let message_editor = cx.new(|cx| { MessageEditor::new( workspace_handle, - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), @@ -2917,7 +2315,7 @@ mod tests { cx, ); }); - message_editor.read(cx).focus_handle(cx).focus(window); + message_editor.read(cx).focus_handle(cx).focus(window, cx); let editor = message_editor.read(cx).editor().clone(); (message_editor, editor) }); @@ -2937,38 +2335,24 @@ mod tests { .0; let main_rs_uri = if cfg!(windows) { - "file:///C:/project/src/main.rs".to_string() + "file:///C:/project/src/main.rs" } else { - "file:///project/src/main.rs".to_string() + "file:///project/src/main.rs" }; // When embedded context is `false` we should get a resource link pretty_assertions::assert_eq!( content, vec![ - acp::ContentBlock::Text(acp::TextContent { - text: "What is in ".to_string(), - annotations: None, - meta: None - }), - acp::ContentBlock::ResourceLink(acp::ResourceLink { - uri: main_rs_uri.clone(), - name: "main.rs".to_string(), - annotations: None, - meta: None, - description: None, - mime_type: None, - size: None, - title: None, - }) + "What is in ".into(), + acp::ContentBlock::ResourceLink(acp::ResourceLink::new("main.rs", main_rs_uri)) ] ); message_editor.update(cx, |editor, _cx| { - editor.prompt_capabilities.replace(acp::PromptCapabilities { - embedded_context: true, - ..Default::default() - }) + editor + .prompt_capabilities + .replace(acp::PromptCapabilities::new().embedded_context(true)) }); let content = message_editor @@ -2981,23 +2365,12 @@ mod tests { pretty_assertions::assert_eq!( content, vec![ - acp::ContentBlock::Text(acp::TextContent { - text: "What is in ".to_string(), - annotations: None, - meta: None - }), - acp::ContentBlock::Resource(acp::EmbeddedResource { - resource: acp::EmbeddedResourceResource::TextResourceContents( - acp::TextResourceContents { - text: file_content.to_string(), - uri: main_rs_uri, - mime_type: None, - meta: None - } - ), - annotations: None, - meta: None - }) + "What is in ".into(), + acp::ContentBlock::Resource(acp::EmbeddedResource::new( + acp::EmbeddedResourceResource::TextResourceContents( 
+ acp::TextResourceContents::new(file_content, main_rs_uri) + ) + )) ] ); } @@ -3075,7 +2448,7 @@ mod tests { let message_editor = cx.new(|cx| { MessageEditor::new( workspace_handle, - project.clone(), + project.downgrade(), history_store.clone(), None, Default::default(), diff --git a/crates/agent_ui/src/acp/mode_selector.rs b/crates/agent_ui/src/acp/mode_selector.rs index aed151de728ce2e802154a73d4add9681a410933..1f50ce74321d393ba6c7f5083bd889bc3dc2c0e1 100644 --- a/crates/agent_ui/src/acp/mode_selector.rs +++ b/crates/agent_ui/src/acp/mode_selector.rs @@ -11,7 +11,7 @@ use ui::{ PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*, }; -use crate::{CycleModeSelector, ToggleProfileSelector}; +use crate::{CycleModeSelector, ToggleProfileSelector, ui::HoldForDefault}; pub struct ModeSelector { connection: Rc, @@ -56,6 +56,10 @@ impl ModeSelector { self.set_mode(all_modes[next_index].id.clone(), cx); } + pub fn mode(&self) -> acp::SessionModeId { + self.connection.current_mode() + } + pub fn set_mode(&mut self, mode: acp::SessionModeId, cx: &mut Context) { let task = self.connection.set_mode(mode, cx); self.setting_mode = true; @@ -104,36 +108,11 @@ impl ModeSelector { entry.documentation_aside(side, DocumentationEdge::Bottom, { let description = description.clone(); - move |cx| { + move |_| { v_flex() .gap_1() .child(Label::new(description.clone())) - .child( - h_flex() - .pt_1() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .gap_0p5() - .text_sm() - .text_color(Color::Muted.color(cx)) - .child("Hold") - .child(h_flex().flex_shrink_0().children( - ui::render_modifiers( - &gpui::Modifiers::secondary_key(), - PlatformStyle::platform(), - None, - Some(ui::TextSize::Default.rems(cx).into()), - true, - ), - )) - .child(div().map(|this| { - if is_default { - this.child("to also unset as default") - } else { - this.child("to also set as default") - } - })), - ) + .child(HoldForDefault::new(is_default)) .into_any_element() } }) @@ -182,7 +161,7 @@ impl Render for ModeSelector { .map(|mode| mode.name.clone()) .unwrap_or_else(|| "Unknown".into()); - let this = cx.entity(); + let this = cx.weak_entity(); let icon = if self.menu_handle.is_deployed() { IconName::ChevronUp @@ -243,7 +222,8 @@ impl Render for ModeSelector { y: px(-2.0), }) .menu(move |window, cx| { - Some(this.update(cx, |this, cx| this.build_context_menu(window, cx))) + this.update(cx, |this, cx| this.build_context_menu(window, cx)) + .ok() }) } } diff --git a/crates/agent_ui/src/acp/model_selector.rs b/crates/agent_ui/src/acp/model_selector.rs index 45fec558720fc5e88548f6dd7bc24fe624a908f5..cff5334a00472fd6f49abcb17897b4ed3c9f590e 100644 --- a/crates/agent_ui/src/acp/model_selector.rs +++ b/crates/agent_ui/src/acp/model_selector.rs @@ -1,27 +1,39 @@ use std::{cmp::Reverse, rc::Rc, sync::Arc}; use acp_thread::{AgentModelInfo, AgentModelList, AgentModelSelector}; +use agent_client_protocol::ModelId; +use agent_servers::AgentServer; +use agent_settings::AgentSettings; use anyhow::Result; -use collections::IndexMap; +use collections::{HashSet, IndexMap}; +use fs::Fs; use futures::FutureExt; use fuzzy::{StringMatchCandidate, match_strings}; -use gpui::{AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, WeakEntity}; +use gpui::{ + Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, FocusHandle, Task, WeakEntity, +}; +use itertools::Itertools; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; -use ui::{ - DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, 
ListItem, - ListItemSpacing, prelude::*, -}; +use settings::Settings; +use ui::{DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, prelude::*}; use util::ResultExt; +use zed_actions::agent::OpenSettings; + +use crate::ui::{HoldForDefault, ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem}; pub type AcpModelSelector = Picker; pub fn acp_model_selector( selector: Rc, + agent_server: Rc, + fs: Arc, + focus_handle: FocusHandle, window: &mut Window, cx: &mut Context, ) -> AcpModelSelector { - let delegate = AcpModelPickerDelegate::new(selector, window, cx); + let delegate = + AcpModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx); Picker::list(delegate, window, cx) .show_scrollbar(true) .width(rems(20.)) @@ -30,22 +42,28 @@ pub fn acp_model_selector( enum AcpModelPickerEntry { Separator(SharedString), - Model(AgentModelInfo), + Model(AgentModelInfo, bool), } pub struct AcpModelPickerDelegate { selector: Rc, + agent_server: Rc, + fs: Arc, filtered_entries: Vec, models: Option, selected_index: usize, - selected_description: Option<(usize, SharedString)>, + selected_description: Option<(usize, SharedString, bool)>, selected_model: Option, _refresh_models_task: Task<()>, + focus_handle: FocusHandle, } impl AcpModelPickerDelegate { fn new( selector: Rc, + agent_server: Rc, + fs: Arc, + focus_handle: FocusHandle, window: &mut Window, cx: &mut Context, ) -> Self { @@ -86,18 +104,82 @@ impl AcpModelPickerDelegate { Self { selector, + agent_server, + fs, filtered_entries: Vec::new(), models: None, selected_model: None, selected_index: 0, selected_description: None, _refresh_models_task: refresh_models_task, + focus_handle, } } pub fn active_model(&self) -> Option<&AgentModelInfo> { self.selected_model.as_ref() } + + pub fn cycle_favorite_models(&mut self, window: &mut Window, cx: &mut Context>) { + if !self.selector.supports_favorites() { + return; + } + + let favorites = AgentSettings::get_global(cx).favorite_model_ids(); + + if favorites.is_empty() { + return; + } + + let Some(models) = self.models.clone() else { + return; + }; + + let all_models: Vec = match models { + AgentModelList::Flat(list) => list, + AgentModelList::Grouped(index_map) => index_map + .into_values() + .flatten() + .collect::>(), + }; + + let favorite_models = all_models + .iter() + .filter(|model| favorites.contains(&model.id)) + .unique_by(|model| &model.id) + .cloned() + .collect::>(); + + let current_id = self.selected_model.as_ref().map(|m| m.id.clone()); + + let current_index_in_favorites = current_id + .as_ref() + .and_then(|id| favorite_models.iter().position(|m| &m.id == id)) + .unwrap_or(usize::MAX); + + let next_index = if current_index_in_favorites == usize::MAX { + 0 + } else { + (current_index_in_favorites + 1) % favorite_models.len() + }; + + let next_model = favorite_models[next_index].clone(); + + self.selector + .select_model(next_model.id.clone(), cx) + .detach_and_log_err(cx); + + self.selected_model = Some(next_model); + + // Keep the picker selection aligned with the newly-selected model + if let Some(new_index) = self.filtered_entries.iter().position(|entry| { + matches!(entry, AcpModelPickerEntry::Model(model_info, _) if self.selected_model.as_ref().is_some_and(|selected| model_info.id == selected.id)) + }) { + self.set_selected_index(new_index, window, cx); + } else { + cx.notify(); + } + } } impl PickerDelegate for AcpModelPickerDelegate { @@ -123,7 +205,7 @@ impl PickerDelegate for AcpModelPickerDelegate { _cx: &mut Context>, ) -> bool { match 
self.filtered_entries.get(ix) { - Some(AcpModelPickerEntry::Model(_)) => true, + Some(AcpModelPickerEntry::Model(_, _)) => true, Some(AcpModelPickerEntry::Separator(_)) | None => false, } } @@ -138,6 +220,12 @@ impl PickerDelegate for AcpModelPickerDelegate { window: &mut Window, cx: &mut Context>, ) -> Task<()> { + let favorites = if self.selector.supports_favorites() { + Arc::new(AgentSettings::get_global(cx).favorite_model_ids()) + } else { + Default::default() + }; + cx.spawn_in(window, async move |this, cx| { let filtered_models = match this .read_with(cx, |this, cx| { @@ -154,7 +242,7 @@ impl PickerDelegate for AcpModelPickerDelegate { this.update_in(cx, |this, window, cx| { this.delegate.filtered_entries = - info_list_to_picker_entries(filtered_models).collect(); + info_list_to_picker_entries(filtered_models, favorites); // Finds the currently selected model in the list let new_index = this .delegate @@ -162,7 +250,7 @@ impl PickerDelegate for AcpModelPickerDelegate { .as_ref() .and_then(|selected| { this.delegate.filtered_entries.iter().position(|entry| { - if let AcpModelPickerEntry::Model(model_info) = entry { + if let AcpModelPickerEntry::Model(model_info, _) = entry { model_info.id == selected.id } else { false @@ -178,9 +266,24 @@ impl PickerDelegate for AcpModelPickerDelegate { } fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - if let Some(AcpModelPickerEntry::Model(model_info)) = + if let Some(AcpModelPickerEntry::Model(model_info, _)) = self.filtered_entries.get(self.selected_index) { + if window.modifiers().secondary() { + let default_model = self.agent_server.default_model(cx); + let is_default = default_model.as_ref() == Some(&model_info.id); + + self.agent_server.set_default_model( + if is_default { + None + } else { + Some(model_info.id.clone()) + }, + self.fs.clone(), + cx, + ); + } + self.selector .select_model(model_info.id.clone(), cx) .detach_and_log_err(cx); @@ -206,73 +309,56 @@ impl PickerDelegate for AcpModelPickerDelegate { cx: &mut Context>, ) -> Option { match self.filtered_entries.get(ix)? 
{ - AcpModelPickerEntry::Separator(title) => Some( - div() - .px_2() - .pb_1() - .when(ix > 1, |this| { - this.mt_1() - .pt_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - }) - .child( - Label::new(title) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - .into_any_element(), - ), - AcpModelPickerEntry::Model(model_info) => { + AcpModelPickerEntry::Separator(title) => { + Some(ModelSelectorHeader::new(title, ix > 1).into_any_element()) + } + AcpModelPickerEntry::Model(model_info, is_favorite) => { let is_selected = Some(model_info) == self.selected_model.as_ref(); - - let model_icon_color = if is_selected { - Color::Accent - } else { - Color::Muted + let default_model = self.agent_server.default_model(cx); + let is_default = default_model.as_ref() == Some(&model_info.id); + + let supports_favorites = self.selector.supports_favorites(); + + let is_favorite = *is_favorite; + let handle_action_click = { + let model_id = model_info.id.clone(); + let fs = self.fs.clone(); + + move |cx: &App| { + crate::favorite_models::toggle_model_id_in_settings( + model_id.clone(), + !is_favorite, + fs.clone(), + cx, + ); + } }; Some( div() .id(("model-picker-menu-child", ix)) .when_some(model_info.description.clone(), |this, description| { - this - .on_hover(cx.listener(move |menu, hovered, _, cx| { - if *hovered { - menu.delegate.selected_description = Some((ix, description.clone())); - } else if matches!(menu.delegate.selected_description, Some((id, _)) if id == ix) { - menu.delegate.selected_description = None; - } - cx.notify(); - })) + this.on_hover(cx.listener(move |menu, hovered, _, cx| { + if *hovered { + menu.delegate.selected_description = + Some((ix, description.clone(), is_default)); + } else if matches!(menu.delegate.selected_description, Some((id, _, _)) if id == ix) { + menu.delegate.selected_description = None; + } + cx.notify(); + })) }) .child( - ListItem::new(ix) - .inset(true) - .spacing(ListItemSpacing::Sparse) - .toggle_state(selected) - .start_slot::(model_info.icon.map(|icon| { - Icon::new(icon) - .color(model_icon_color) - .size(IconSize::Small) - })) - .child( - h_flex() - .w_full() - .pl_0p5() - .gap_1p5() - .w(px(240.)) - .child(Label::new(model_info.name.clone()).truncate()), - ) - .end_slot(div().pr_3().when(is_selected, |this| { - this.child( - Icon::new(IconName::Check) - .color(Color::Accent) - .size(IconSize::Small), - ) - })), + ModelSelectorListItem::new(ix, model_info.name.clone()) + .when_some(model_info.icon, |this, icon| this.icon(icon)) + .is_selected(is_selected) + .is_focused(selected) + .when(supports_favorites, |this| { + this.is_favorite(is_favorite) + .on_toggle_favorite(handle_action_click) + }), ) - .into_any_element() + .into_any_element(), ) } } @@ -283,31 +369,88 @@ impl PickerDelegate for AcpModelPickerDelegate { _window: &mut Window, _cx: &mut Context>, ) -> Option { - self.selected_description.as_ref().map(|(_, description)| { - let description = description.clone(); - DocumentationAside::new( - DocumentationSide::Left, - DocumentationEdge::Top, - Rc::new(move |_| Label::new(description.clone()).into_any_element()), - ) - }) + self.selected_description + .as_ref() + .map(|(_, description, is_default)| { + let description = description.clone(); + let is_default = *is_default; + + DocumentationAside::new( + DocumentationSide::Left, + DocumentationEdge::Top, + Rc::new(move |_| { + v_flex() + .gap_1() + .child(Label::new(description.clone())) + .child(HoldForDefault::new(is_default)) + .into_any_element() + }), + ) + }) + } + 
+ fn render_footer( + &self, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + let focus_handle = self.focus_handle.clone(); + + if !self.selector.should_render_footer() { + return None; + } + + Some(ModelSelectorFooter::new(OpenSettings.boxed_clone(), focus_handle).into_any_element()) } } fn info_list_to_picker_entries( model_list: AgentModelList, -) -> impl Iterator { + favorites: Arc>, +) -> Vec { + let mut entries = Vec::new(); + + let all_models: Vec<_> = match &model_list { + AgentModelList::Flat(list) => list.iter().collect(), + AgentModelList::Grouped(index_map) => index_map.values().flatten().collect(), + }; + + let favorite_models: Vec<_> = all_models + .iter() + .filter(|m| favorites.contains(&m.id)) + .unique_by(|m| &m.id) + .collect(); + + let has_favorites = !favorite_models.is_empty(); + if has_favorites { + entries.push(AcpModelPickerEntry::Separator("Favorite".into())); + for model in favorite_models { + entries.push(AcpModelPickerEntry::Model((*model).clone(), true)); + } + } + match model_list { AgentModelList::Flat(list) => { - itertools::Either::Left(list.into_iter().map(AcpModelPickerEntry::Model)) + if has_favorites { + entries.push(AcpModelPickerEntry::Separator("All".into())); + } + for model in list { + let is_favorite = favorites.contains(&model.id); + entries.push(AcpModelPickerEntry::Model(model, is_favorite)); + } } AgentModelList::Grouped(index_map) => { - itertools::Either::Right(index_map.into_iter().flat_map(|(group_name, models)| { - std::iter::once(AcpModelPickerEntry::Separator(group_name.0)) - .chain(models.into_iter().map(AcpModelPickerEntry::Model)) - })) + for (group_name, models) in index_map { + entries.push(AcpModelPickerEntry::Separator(group_name.0)); + for model in models { + let is_favorite = favorites.contains(&model.id); + entries.push(AcpModelPickerEntry::Model(model, is_favorite)); + } + } } } + + entries } async fn fuzzy_search( @@ -323,9 +466,7 @@ async fn fuzzy_search( let candidates = model_list .iter() .enumerate() - .map(|(ix, model)| { - StringMatchCandidate::new(ix, &format!("{}/{}", model.id, model.name)) - }) + .map(|(ix, model)| StringMatchCandidate::new(ix, model.name.as_ref())) .collect::>(); let mut matches = match_strings( &candidates, @@ -384,7 +525,7 @@ mod tests { models .into_iter() .map(|model| acp_thread::AgentModelInfo { - id: acp::ModelId(model.to_string().into()), + id: acp::ModelId::new(model.to_string()), name: model.to_string().into(), description: None, icon: None, @@ -431,6 +572,170 @@ mod tests { } } + fn create_favorites(models: Vec<&str>) -> Arc> { + Arc::new( + models + .into_iter() + .map(|m| ModelId::new(m.to_string())) + .collect(), + ) + } + + fn get_entry_model_ids(entries: &[AcpModelPickerEntry]) -> Vec<&str> { + entries + .iter() + .filter_map(|entry| match entry { + AcpModelPickerEntry::Model(info, _) => Some(info.id.0.as_ref()), + _ => None, + }) + .collect() + } + + fn get_entry_labels(entries: &[AcpModelPickerEntry]) -> Vec<&str> { + entries + .iter() + .map(|entry| match entry { + AcpModelPickerEntry::Model(info, _) => info.id.0.as_ref(), + AcpModelPickerEntry::Separator(s) => &s, + }) + .collect() + } + + #[gpui::test] + fn test_favorites_section_appears_when_favorites_exist(_cx: &mut TestAppContext) { + let models = create_model_list(vec![ + ("zed", vec!["zed/claude", "zed/gemini"]), + ("openai", vec!["openai/gpt-5"]), + ]); + let favorites = create_favorites(vec!["zed/gemini"]); + + let entries = info_list_to_picker_entries(models, favorites); + + assert!(matches!( + 
entries.first(),
+            Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite"
+        ));
+
+        let model_ids = get_entry_model_ids(&entries);
+        assert_eq!(model_ids[0], "zed/gemini");
+    }
+
+    #[gpui::test]
+    fn test_no_favorites_section_when_no_favorites(_cx: &mut TestAppContext) {
+        let models = create_model_list(vec![("zed", vec!["zed/claude", "zed/gemini"])]);
+        let favorites = create_favorites(vec![]);
+
+        let entries = info_list_to_picker_entries(models, favorites);
+
+        assert!(matches!(
+            entries.first(),
+            Some(AcpModelPickerEntry::Separator(s)) if s == "zed"
+        ));
+    }
+
+    #[gpui::test]
+    fn test_models_have_correct_actions(_cx: &mut TestAppContext) {
+        let models = create_model_list(vec![
+            ("zed", vec!["zed/claude", "zed/gemini"]),
+            ("openai", vec!["openai/gpt-5"]),
+        ]);
+        let favorites = create_favorites(vec!["zed/claude"]);
+
+        let entries = info_list_to_picker_entries(models, favorites);
+
+        for entry in &entries {
+            if let AcpModelPickerEntry::Model(info, is_favorite) = entry {
+                if info.id.0.as_ref() == "zed/claude" {
+                    assert!(is_favorite, "zed/claude should be a favorite");
+                } else {
+                    assert!(!is_favorite, "{} should not be a favorite", info.id.0);
+                }
+            }
+        }
+    }
+
+    #[gpui::test]
+    fn test_favorites_appear_in_both_sections(_cx: &mut TestAppContext) {
+        let models = create_model_list(vec![
+            ("zed", vec!["zed/claude", "zed/gemini"]),
+            ("openai", vec!["openai/gpt-5", "openai/gpt-4"]),
+        ]);
+        let favorites = create_favorites(vec!["zed/gemini", "openai/gpt-5"]);
+
+        let entries = info_list_to_picker_entries(models, favorites);
+        let model_ids = get_entry_model_ids(&entries);
+
+        assert_eq!(model_ids[0], "zed/gemini");
+        assert_eq!(model_ids[1], "openai/gpt-5");
+
+        assert!(model_ids[2..].contains(&"zed/gemini"));
+        assert!(model_ids[2..].contains(&"openai/gpt-5"));
+    }
+
+    #[gpui::test]
+    fn test_favorites_are_not_duplicated_when_repeated_in_other_sections(_cx: &mut TestAppContext) {
+        let models = create_model_list(vec![
+            ("Recommended", vec!["zed/claude", "anthropic/claude"]),
+            ("Zed", vec!["zed/claude", "zed/gpt-5"]),
+            ("Anthropic", vec!["anthropic/claude"]),
+            ("OpenAI", vec!["openai/gpt-5"]),
+        ]);
+
+        let favorites = create_favorites(vec!["zed/claude"]);
+
+        let entries = info_list_to_picker_entries(models, favorites);
+        let labels = get_entry_labels(&entries);
+
+        assert_eq!(
+            labels,
+            vec![
+                "Favorite",
+                "zed/claude",
+                "Recommended",
+                "zed/claude",
+                "anthropic/claude",
+                "Zed",
+                "zed/claude",
+                "zed/gpt-5",
+                "Anthropic",
+                "anthropic/claude",
+                "OpenAI",
+                "openai/gpt-5"
+            ]
+        );
+    }
+
+    #[gpui::test]
+    fn test_flat_model_list_with_favorites(_cx: &mut TestAppContext) {
+        let models = AgentModelList::Flat(vec![
+            acp_thread::AgentModelInfo {
+                id: acp::ModelId::new("zed/claude".to_string()),
+                name: "Claude".into(),
+                description: None,
+                icon: None,
+            },
+            acp_thread::AgentModelInfo {
+                id: acp::ModelId::new("zed/gemini".to_string()),
+                name: "Gemini".into(),
+                description: None,
+                icon: None,
+            },
+        ]);
+        let favorites = create_favorites(vec!["zed/gemini"]);
+
+        let entries = info_list_to_picker_entries(models, favorites);
+
+        assert!(matches!(
+            entries.first(),
+            Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite"
+        ));
+
+        assert!(entries.iter().any(|e| matches!(
+            e,
+            AcpModelPickerEntry::Separator(s) if s == "All"
+        )));
+    }
+
     #[gpui::test]
     async fn test_fuzzy_match(cx: &mut TestAppContext) {
         let models = create_model_list(vec![
diff --git a/crates/agent_ui/src/acp/model_selector_popover.rs b/crates/agent_ui/src/acp/model_selector_popover.rs
index 2e8ade95ffcb65d8c7742b60fa0facc70358ae1e..d6709081863c9545fba4c6e2304f195e77b013df 100644 --- a/crates/agent_ui/src/acp/model_selector_popover.rs +++ b/crates/agent_ui/src/acp/model_selector_popover.rs @@ -1,14 +1,17 @@ use std::rc::Rc; +use std::sync::Arc; use acp_thread::{AgentModelInfo, AgentModelSelector}; +use agent_servers::AgentServer; +use agent_settings::AgentSettings; +use fs::Fs; use gpui::{Entity, FocusHandle}; use picker::popover_menu::PickerPopoverMenu; -use ui::{ - ButtonLike, Context, IntoElement, PopoverMenuHandle, SharedString, TintColor, Tooltip, Window, - prelude::*, -}; +use settings::Settings as _; +use ui::{ButtonLike, KeyBinding, PopoverMenuHandle, TintColor, Tooltip, prelude::*}; use zed_actions::agent::ToggleModelSelector; +use crate::CycleFavoriteModels; use crate::acp::{AcpModelSelector, model_selector::acp_model_selector}; pub struct AcpModelSelectorPopover { @@ -20,13 +23,25 @@ pub struct AcpModelSelectorPopover { impl AcpModelSelectorPopover { pub(crate) fn new( selector: Rc, + agent_server: Rc, + fs: Arc, menu_handle: PopoverMenuHandle, focus_handle: FocusHandle, window: &mut Window, cx: &mut Context, ) -> Self { + let focus_handle_clone = focus_handle.clone(); Self { - selector: cx.new(move |cx| acp_model_selector(selector, window, cx)), + selector: cx.new(move |cx| { + acp_model_selector( + selector, + agent_server, + fs, + focus_handle_clone.clone(), + window, + cx, + ) + }), menu_handle, focus_handle, } @@ -39,6 +54,12 @@ impl AcpModelSelectorPopover { pub fn active_model<'a>(&self, cx: &'a App) -> Option<&'a AgentModelInfo> { self.selector.read(cx).delegate.active_model() } + + pub fn cycle_favorite_models(&self, window: &mut Window, cx: &mut Context) { + self.selector.update(cx, |selector, cx| { + selector.delegate.cycle_favorite_models(window, cx); + }); + } } impl Render for AcpModelSelectorPopover { @@ -59,6 +80,46 @@ impl Render for AcpModelSelectorPopover { (Color::Muted, IconName::ChevronDown) }; + let tooltip = Tooltip::element({ + move |_, cx| { + let focus_handle = focus_handle.clone(); + let should_show_cycle_row = !AgentSettings::get_global(cx) + .favorite_model_ids() + .is_empty(); + + v_flex() + .gap_1() + .child( + h_flex() + .gap_2() + .justify_between() + .child(Label::new("Change Model")) + .child(KeyBinding::for_action_in( + &ToggleModelSelector, + &focus_handle, + cx, + )), + ) + .when(should_show_cycle_row, |this| { + this.child( + h_flex() + .pt_1() + .gap_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .justify_between() + .child(Label::new("Cycle Favorited Models")) + .child(KeyBinding::for_action_in( + &CycleFavoriteModels, + &focus_handle, + cx, + )), + ) + }) + .into_any() + } + }); + PickerPopoverMenu::new( self.selector.clone(), ButtonLike::new("active-model") @@ -73,9 +134,7 @@ impl Render for AcpModelSelectorPopover { .ml_0p5(), ) .child(Icon::new(icon).color(Color::Muted).size(IconSize::XSmall)), - move |_window, cx| { - Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) - }, + tooltip, gpui::Corner::BottomRight, cx, ) diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index 11718c63475212fbe8b996b2f6edae8b4295c91a..1aa89b35d34c8c0543a56014fee7766b6de66eb2 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -1,5 +1,5 @@ use crate::acp::AcpThreadView; -use crate::{AgentPanel, RemoveSelectedThread}; +use crate::{AgentPanel, RemoveHistory, RemoveSelectedThread}; 
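`RemoveHistory` (imported just above) and `CycleFavoriteModels` (imported in the popover hunk) are crate-level actions that this diff only consumes, so their declarations are not visible here. As a rough sketch of the usual gpui pattern, such actions are declared once with the `actions!` macro and then handled with `.on_action(cx.listener(...))`, which is exactly how the hunks below wire up `RemoveHistory`; the `agent` namespace in the sketch is an assumption.

```rust
use gpui::actions;

// Hypothetical declaration site; the real definitions live elsewhere in the
// `agent_ui` crate. `actions!` generates unit-struct action types that the
// keymap and `window.dispatch_action(...)` refer to by name.
actions!(agent, [CycleFavoriteModels, RemoveHistory]);
```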
use agent::{HistoryEntry, HistoryStore}; use chrono::{Datelike as _, Local, NaiveDate, TimeDelta}; use editor::{Editor, EditorEvent}; @@ -12,7 +12,7 @@ use std::{fmt::Display, ops::Range}; use text::Bias; use time::{OffsetDateTime, UtcOffset}; use ui::{ - HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tooltip, WithScrollbar, + HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip, WithScrollbar, prelude::*, }; @@ -25,6 +25,7 @@ pub struct AcpThreadHistory { search_query: SharedString, visible_items: Vec, local_timezone: UtcOffset, + confirming_delete_history: bool, _update_task: Task<()>, _subscriptions: Vec, } @@ -98,6 +99,7 @@ impl AcpThreadHistory { ) .unwrap(), search_query: SharedString::default(), + confirming_delete_history: false, _subscriptions: vec![search_editor_subscription, history_store_subscription], _update_task: Task::ready(()), }; @@ -331,6 +333,24 @@ impl AcpThreadHistory { task.detach_and_log_err(cx); } + fn remove_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.history_store.update(cx, |store, cx| { + store.delete_threads(cx).detach_and_log_err(cx) + }); + self.confirming_delete_history = false; + cx.notify(); + } + + fn prompt_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = true; + cx.notify(); + } + + fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = false; + cx.notify(); + } + fn render_list_items( &mut self, range: Range, @@ -426,9 +446,10 @@ impl AcpThreadHistory { .tooltip(move |_window, cx| { Tooltip::for_action("Delete", &RemoveSelectedThread, cx) }) - .on_click( - cx.listener(move |this, _, _, cx| this.remove_thread(ix, cx)), - ), + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_thread(ix, cx); + cx.stop_propagation() + })), ) } else { None @@ -447,6 +468,8 @@ impl Focusable for AcpThreadHistory { impl Render for AcpThreadHistory { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_no_history = self.history_store.read(cx).is_empty(cx); + v_flex() .key_context("ThreadHistory") .size_full() @@ -457,9 +480,12 @@ impl Render for AcpThreadHistory { .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::remove_selected_thread)) + .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| { + this.remove_history(window, cx); + })) .child( h_flex() - .h(px(41.)) // Match the toolbar perfectly + .h(Tab::container_height(cx)) .w_full() .py_1() .px_2() @@ -481,7 +507,7 @@ impl Render for AcpThreadHistory { .overflow_hidden() .flex_grow(); - if self.history_store.read(cx).is_empty(cx) { + if has_no_history { view.justify_center().items_center().child( Label::new("You don't have any past threads yet.") .size(LabelSize::Small) @@ -502,16 +528,74 @@ impl Render for AcpThreadHistory { ) .p_1() .pr_4() - .track_scroll(self.scroll_handle.clone()) + .track_scroll(&self.scroll_handle) .flex_grow(), ) - .vertical_scrollbar_for( - self.scroll_handle.clone(), - window, - cx, - ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) } }) + .when(!has_no_history, |this| { + this.child( + h_flex() + .p_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .when(!self.confirming_delete_history, |this| { + this.child( + Button::new("delete_history", "Delete All History") + .full_width() + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, 
window, cx| { + this.prompt_delete_history(window, cx); + })), + ) + }) + .when(self.confirming_delete_history, |this| { + this.w_full() + .gap_2() + .flex_wrap() + .justify_between() + .child( + h_flex() + .flex_wrap() + .gap_1() + .child( + Label::new("Delete all threads?") + .size(LabelSize::Small), + ) + .child( + Label::new("You won't be able to recover them later.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .child( + h_flex() + .gap_1() + .child( + Button::new("cancel_delete", "Cancel") + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.cancel_delete_history(window, cx); + })), + ) + .child( + Button::new("confirm_delete", "Delete") + .style(ButtonStyle::Tinted(ui::TintColor::Error)) + .color(Color::Error) + .label_size(LabelSize::Small) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action( + Box::new(RemoveHistory), + cx, + ); + })), + ), + ) + }), + ) + }) } } diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 17daf5a18e97829d5e4d64d30d266b5d5d271e7b..9e9af499727ad8478fa5fc1d46dc3b3bf8e20a71 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -51,7 +51,7 @@ use ui::{ PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar, prelude::*, }; use util::{ResultExt, size::format_file_size, time::duration_alt_display}; -use workspace::{CollaboratorId, Workspace}; +use workspace::{CollaboratorId, NewTerminal, Workspace}; use zed_actions::agent::{Chat, ToggleModelSelector}; use zed_actions::assistant::OpenRulesLibrary; @@ -63,14 +63,11 @@ use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; use crate::agent_diff::AgentDiff; use crate::profile_selector::{ProfileProvider, ProfileSelector}; -use crate::ui::{ - AgentNotification, AgentNotificationEvent, BurnModeTooltip, UnavailableEditingTooltip, - UsageCallout, -}; +use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip, UsageCallout}; use crate::{ AgentDiffPane, AgentPanel, AllowAlways, AllowOnce, ContinueThread, ContinueWithBurnMode, - CycleModeSelector, ExpandMessageEditor, Follow, KeepAll, OpenAgentDiff, OpenHistory, RejectAll, - RejectOnce, ToggleBurnMode, ToggleProfileSelector, + CycleFavoriteModels, CycleModeSelector, ExpandMessageEditor, Follow, KeepAll, NewThread, + OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, ToggleBurnMode, ToggleProfileSelector, }; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -100,7 +97,7 @@ impl ThreadError { { Self::ModelRequestLimitReached(error.plan) } else if let Some(acp_error) = error.downcast_ref::() - && acp_error.code == acp::ErrorCode::AUTH_REQUIRED.code + && acp_error.code == acp::ErrorCode::AuthRequired { Self::AuthenticationRequired(acp_error.message.clone().into()) } else { @@ -170,7 +167,7 @@ impl ThreadFeedbackState { } } let session_id = thread.read(cx).session_id().clone(); - let agent = thread.read(cx).connection().telemetry_id(); + let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); let task = telemetry.thread_data(&session_id, cx); let rating = match feedback { ThreadFeedback::Positive => "positive", @@ -180,7 +177,7 @@ impl ThreadFeedbackState { let thread = task.await?; telemetry::event!( "Agent Thread Rated", - agent = agent, + agent = agent_telemetry_id, session_id = session_id, rating = rating, thread = thread @@ -207,13 +204,13 @@ impl ThreadFeedbackState { self.comments_editor.take(); let session_id = thread.read(cx).session_id().clone(); - let agent 
= thread.read(cx).connection().telemetry_id(); + let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); let task = telemetry.thread_data(&session_id, cx); cx.background_spawn(async move { let thread = task.await?; telemetry::event!( "Agent Thread Feedback Comments", - agent = agent, + agent = agent_telemetry_id, session_id = session_id, comments = comments, thread = thread @@ -256,7 +253,7 @@ impl ThreadFeedbackState { editor }); - editor.read(cx).focus_handle(cx).focus(window); + editor.read(cx).focus_handle(cx).focus(window, cx); editor } } @@ -278,6 +275,7 @@ pub struct AcpThreadView { notification_subscriptions: HashMap, Vec>, thread_retry_status: Option, thread_error: Option, + thread_error_markdown: Option>, thread_feedback: ThreadFeedbackState, list_state: ListState, auth_task: Option>, @@ -296,6 +294,7 @@ pub struct AcpThreadView { _cancel_task: Option>, _subscriptions: [Subscription; 5], show_codex_windows_warning: bool, + in_flight_prompt: Option>, } enum ThreadState { @@ -331,6 +330,7 @@ impl AcpThreadView { project: Entity, history_store: Entity, prompt_store: Option>, + track_load_event: bool, window: &mut Window, cx: &mut Context, ) -> Self { @@ -342,7 +342,7 @@ impl AcpThreadView { let message_editor = cx.new(|cx| { let mut editor = MessageEditor::new( workspace.clone(), - project.clone(), + project.downgrade(), history_store.clone(), prompt_store.clone(), prompt_capabilities.clone(), @@ -367,7 +367,7 @@ impl AcpThreadView { let entry_view_state = cx.new(|_| { EntryViewState::new( workspace.clone(), - project.clone(), + project.downgrade(), history_store.clone(), prompt_store.clone(), prompt_capabilities.clone(), @@ -389,8 +389,20 @@ impl AcpThreadView { ), ]; - let show_codex_windows_warning = crate::ExternalAgent::parse_built_in(agent.as_ref()) - == Some(crate::ExternalAgent::Codex); + cx.on_release(|this, cx| { + for window in this.notifications.drain(..) 
{ + window + .update(cx, |_, window, _| { + window.remove_window(); + }) + .ok(); + } + }) + .detach(); + + let show_codex_windows_warning = cfg!(windows) + && project.read(cx).is_local() + && agent.clone().downcast::().is_some(); Self { agent: agent.clone(), @@ -402,6 +414,7 @@ impl AcpThreadView { resume_thread.clone(), workspace.clone(), project.clone(), + track_load_event, window, cx, ), @@ -415,6 +428,7 @@ impl AcpThreadView { list_state: list_state, thread_retry_status: None, thread_error: None, + thread_error_markdown: None, thread_feedback: Default::default(), auth_task: None, expanded_tool_calls: HashSet::default(), @@ -435,6 +449,7 @@ impl AcpThreadView { new_server_version_available: None, resume_thread_metadata: resume_thread, show_codex_windows_warning, + in_flight_prompt: None, } } @@ -444,6 +459,7 @@ impl AcpThreadView { self.resume_thread_metadata.clone(), self.workspace.clone(), self.project.clone(), + true, window, cx, ); @@ -457,6 +473,7 @@ impl AcpThreadView { resume_thread: Option, workspace: WeakEntity, project: Entity, + track_load_event: bool, window: &mut Window, cx: &mut Context, ) -> ThreadState { @@ -515,6 +532,10 @@ impl AcpThreadView { } }; + if track_load_event { + telemetry::event!("Agent Thread Started", agent = connection.telemetry_id()); + } + let result = if let Some(native_agent) = connection .clone() .downcast::() @@ -589,9 +610,13 @@ impl AcpThreadView { .connection() .model_selector(thread.read(cx).session_id()) .map(|selector| { + let agent_server = this.agent.clone(); + let fs = this.project.read(cx).fs().clone(); cx.new(|cx| { AcpModelSelectorPopover::new( selector, + agent_server, + fs, PopoverMenuHandle::default(), this.focus_handle(cx), window, @@ -645,7 +670,6 @@ impl AcpThreadView { mode_selector, _subscriptions: subscriptions, }; - this.message_editor.focus_handle(cx).focus(window); this.profile_selector = this.as_native_thread(cx).map(|thread| { cx.new(|cx| { @@ -658,6 +682,8 @@ impl AcpThreadView { }) }); + this.message_editor.focus_handle(cx).focus(window, cx); + cx.notify(); } Err(err) => { @@ -675,7 +701,7 @@ impl AcpThreadView { this.new_server_version_available = Some(new_version.into()); cx.notify(); }) - .log_err(); + .ok(); } } }) @@ -758,7 +784,7 @@ impl AcpThreadView { _subscription: subscription, }; if this.message_editor.focus_handle(cx).is_focused(window) { - this.focus_handle.focus(window) + this.focus_handle.focus(window, cx) } cx.notify(); }) @@ -778,7 +804,7 @@ impl AcpThreadView { ThreadState::LoadError(LoadError::Other(format!("{:#}", err).into())) } if self.message_editor.focus_handle(cx).is_focused(window) { - self.focus_handle.focus(window) + self.focus_handle.focus(window, cx) } cx.notify(); } @@ -798,6 +824,7 @@ impl AcpThreadView { if should_retry { self.thread_error = None; + self.thread_error_markdown = None; self.reset(window, cx); } } @@ -991,6 +1018,10 @@ impl AcpThreadView { } } + pub fn is_loading(&self) -> bool { + matches!(self.thread_state, ThreadState::Loading { .. 
}) + } + fn resume_chat(&mut self, cx: &mut Context) { self.thread_error.take(); let Some(thread) = self.thread() else { @@ -1119,8 +1150,8 @@ impl AcpThreadView { let Some(thread) = self.thread() else { return; }; - let agent_telemetry_id = self.agent.telemetry_id(); let session_id = thread.read(cx).session_id().clone(); + let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); let thread = thread.downgrade(); if self.should_be_following { self.workspace @@ -1132,6 +1163,7 @@ impl AcpThreadView { self.is_loading_contents = true; let model_id = self.current_model_id(cx); + let mode_id = self.current_mode_id(cx); let guard = cx.new(|_| ()); cx.observe_release(&guard, |this, _guard, cx| { this.is_loading_contents = false; @@ -1147,6 +1179,7 @@ impl AcpThreadView { } this.update_in(cx, |this, window, cx| { + this.in_flight_prompt = Some(contents.clone()); this.set_editor_is_expanded(false, cx); this.scroll_to_bottom(cx); this.message_editor.update(cx, |message_editor, cx| { @@ -1166,19 +1199,26 @@ impl AcpThreadView { "Agent Message Sent", agent = agent_telemetry_id, session = session_id, - model = model_id + model = model_id, + mode = mode_id ); thread.send(contents, cx) })?; let res = send.await; let turn_time_ms = turn_start_time.elapsed().as_millis(); - let status = if res.is_ok() { "success" } else { "failure" }; + let status = if res.is_ok() { + this.update(cx, |this, _| this.in_flight_prompt.take()).ok(); + "success" + } else { + "failure" + }; telemetry::event!( "Agent Turn Completed", agent = agent_telemetry_id, session = session_id, model = model_id, + mode = mode_id, status, turn_time_ms, ); @@ -1230,7 +1270,7 @@ impl AcpThreadView { } }) }; - self.focus_handle(cx).focus(window); + self.focus_handle(cx).focus(window, cx); cx.notify(); } @@ -1255,16 +1295,38 @@ impl AcpThreadView { }; cx.spawn_in(window, async move |this, cx| { + // Check if there are any edits from prompts before the one being regenerated. + // + // If there are, we keep/accept them since we're not regenerating the prompt that created them. + // + // If editing the prompt that generated the edits, they are auto-rejected + // through the `rewind` function in the `acp_thread`. + let has_earlier_edits = thread.read_with(cx, |thread, _| { + thread + .entries() + .iter() + .take(entry_ix) + .any(|entry| entry.diffs().next().is_some()) + })?; + + if has_earlier_edits { + thread.update(cx, |thread, cx| { + thread.action_log().update(cx, |action_log, cx| { + action_log.keep_all_edits(None, cx); + }); + })?; + } + thread .update(cx, |thread, cx| thread.rewind(user_message_id, cx))? 
.await?; this.update_in(cx, |this, window, cx| { this.send_impl(message_editor, window, cx); - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); })?; anyhow::Ok(()) }) - .detach(); + .detach_and_log_err(cx); } fn open_edited_buffer( @@ -1327,6 +1389,7 @@ impl AcpThreadView { fn clear_thread_error(&mut self, cx: &mut Context) { self.thread_error = None; + self.thread_error_markdown = None; cx.notify(); } @@ -1402,7 +1465,7 @@ impl AcpThreadView { self.thread_retry_status.take(); self.thread_state = ThreadState::LoadError(error.clone()); if self.message_editor.focus_handle(cx).is_focused(window) { - self.focus_handle.focus(window) + self.focus_handle.focus(window, cx) } } AcpThreadEvent::TitleUpdated => { @@ -1430,18 +1493,8 @@ impl AcpThreadView { .iter() .any(|method| method.id.0.as_ref() == "claude-login") { - available_commands.push(acp::AvailableCommand { - name: "login".to_owned(), - description: "Authenticate".to_owned(), - input: None, - meta: None, - }); - available_commands.push(acp::AvailableCommand { - name: "logout".to_owned(), - description: "Authenticate".to_owned(), - input: None, - meta: None, - }); + available_commands.push(acp::AvailableCommand::new("login", "Authenticate")); + available_commands.push(acp::AvailableCommand::new("logout", "Authenticate")); } let has_commands = !available_commands.is_empty(); @@ -1476,6 +1529,7 @@ impl AcpThreadView { else { return; }; + let agent_telemetry_id = connection.telemetry_id(); // Check for the experimental "terminal-auth" _meta field let auth_method = connection.auth_methods().iter().find(|m| m.id == method); @@ -1543,19 +1597,18 @@ impl AcpThreadView { ); cx.notify(); self.auth_task = Some(cx.spawn_in(window, { - let agent = self.agent.clone(); async move |this, cx| { let result = authenticate.await; match &result { Ok(_) => telemetry::event!( "Authenticate Agent Succeeded", - agent = agent.telemetry_id() + agent = agent_telemetry_id ), Err(_) => { telemetry::event!( "Authenticate Agent Failed", - agent = agent.telemetry_id(), + agent = agent_telemetry_id, ) } } @@ -1639,6 +1692,7 @@ impl AcpThreadView { None, this.workspace.clone(), this.project.clone(), + true, window, cx, ) @@ -1694,43 +1748,38 @@ impl AcpThreadView { connection.authenticate(method, cx) }; cx.notify(); - self.auth_task = - Some(cx.spawn_in(window, { - let agent = self.agent.clone(); - async move |this, cx| { - let result = authenticate.await; - - match &result { - Ok(_) => telemetry::event!( - "Authenticate Agent Succeeded", - agent = agent.telemetry_id() - ), - Err(_) => { - telemetry::event!( - "Authenticate Agent Failed", - agent = agent.telemetry_id(), - ) - } + self.auth_task = Some(cx.spawn_in(window, { + async move |this, cx| { + let result = authenticate.await; + + match &result { + Ok(_) => telemetry::event!( + "Authenticate Agent Succeeded", + agent = agent_telemetry_id + ), + Err(_) => { + telemetry::event!("Authenticate Agent Failed", agent = agent_telemetry_id,) } + } - this.update_in(cx, |this, window, cx| { - if let Err(err) = result { - if let ThreadState::Unauthenticated { - pending_auth_method, - .. - } = &mut this.thread_state - { - pending_auth_method.take(); - } - this.handle_thread_error(err, cx); - } else { - this.reset(window, cx); + this.update_in(cx, |this, window, cx| { + if let Err(err) = result { + if let ThreadState::Unauthenticated { + pending_auth_method, + .. 
+ } = &mut this.thread_state + { + pending_auth_method.take(); } - this.auth_task.take() - }) - .ok(); - } - })); + this.handle_thread_error(err, cx); + } else { + this.reset(window, cx); + } + this.auth_task.take() + }) + .ok(); + } + })); } fn spawn_external_agent_login( @@ -1849,6 +1898,17 @@ impl AcpThreadView { }) } + pub fn has_user_submitted_prompt(&self, cx: &App) -> bool { + self.thread().is_some_and(|thread| { + thread.read(cx).entries().iter().any(|entry| { + matches!( + entry, + AgentThreadEntry::UserMessage(user_message) if user_message.id.is_some() + ) + }) + }) + } + fn authorize_tool_call( &mut self, tool_call_id: acp::ToolCallId, @@ -1860,10 +1920,11 @@ impl AcpThreadView { let Some(thread) = self.thread() else { return; }; + let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); telemetry::event!( "Agent Tool Call Authorized", - agent = self.agent.telemetry_id(), + agent = agent_telemetry_id, session = thread.read(cx).session_id(), option = option_kind ); @@ -1901,6 +1962,16 @@ impl AcpThreadView { window: &mut Window, cx: &Context, ) -> AnyElement { + let is_indented = entry.is_indented(); + let is_first_indented = is_indented + && self.thread().is_some_and(|thread| { + thread + .read(cx) + .entries() + .get(entry_ix.saturating_sub(1)) + .is_none_or(|entry| !entry.is_indented()) + }); + let primary = match &entry { AgentThreadEntry::UserMessage(message) => { let Some(editor) = self @@ -1933,7 +2004,9 @@ impl AcpThreadView { v_flex() .id(("user_message", entry_ix)) .map(|this| { - if entry_ix == 0 && !has_checkpoint_button && rules_item.is_none() { + if is_first_indented { + this.pt_0p5() + } else if entry_ix == 0 && !has_checkpoint_button && rules_item.is_none() { this.pt(rems_from_px(18.)) } else if rules_item.is_some() { this.pt_3() @@ -1979,6 +2052,9 @@ impl AcpThreadView { .shadow_md() .bg(cx.theme().colors().editor_background) .border_1() + .when(is_indented, |this| { + this.py_2().px_2().shadow_sm() + }) .when(editing && !editor_focus, |this| this.border_dashed()) .border_color(cx.theme().colors().border) .map(|this|{ @@ -2049,10 +2125,23 @@ impl AcpThreadView { .icon_size(IconSize::Small) .icon_color(Color::Muted) .style(ButtonStyle::Transparent) - .tooltip(move |_window, cx| { - cx.new(|_| UnavailableEditingTooltip::new(agent_name.clone())) - .into() - }) + .tooltip(Tooltip::element({ + move |_, _| { + v_flex() + .gap_1() + .child(Label::new("Unavailable Editing")).child( + div().max_w_64().child( + Label::new(format!( + "Editing previous messages is not available for {} yet.", + agent_name.clone() + )) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .into_any_element() + } + })) ) ) } @@ -2060,7 +2149,10 @@ impl AcpThreadView { ) .into_any() } - AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) => { + AgentThreadEntry::AssistantMessage(AssistantMessage { + chunks, + indented: _, + }) => { let is_last = entry_ix + 1 == total_entries; let style = default_markdown_style(false, false, window, cx); @@ -2094,6 +2186,7 @@ impl AcpThreadView { v_flex() .px_5() .py_1p5() + .when(is_first_indented, |this| this.pt_0p5()) .when(is_last, |this| this.pb_4()) .w_full() .text_ui(cx) @@ -2103,19 +2196,48 @@ impl AcpThreadView { AgentThreadEntry::ToolCall(tool_call) => { let has_terminals = tool_call.terminals().next().is_some(); - div().w_full().map(|this| { - if has_terminals { - this.children(tool_call.terminals().map(|terminal| { - self.render_terminal_tool_call( - entry_ix, terminal, tool_call, window, cx, - ) - })) - } else { - 
this.child(self.render_tool_call(entry_ix, tool_call, window, cx)) - } - }) + div() + .w_full() + .map(|this| { + if has_terminals { + this.children(tool_call.terminals().map(|terminal| { + self.render_terminal_tool_call( + entry_ix, terminal, tool_call, window, cx, + ) + })) + } else { + this.child(self.render_tool_call(entry_ix, tool_call, window, cx)) + } + }) + .into_any() } - .into_any(), + }; + + let primary = if is_indented { + let line_top = if is_first_indented { + rems_from_px(-12.0) + } else { + rems_from_px(0.0) + }; + + div() + .relative() + .w_full() + .pl(rems_from_px(20.0)) + .bg(cx.theme().colors().panel_background.opacity(0.2)) + .child( + div() + .absolute() + .left(rems_from_px(18.0)) + .top(line_top) + .bottom_0() + .w_px() + .bg(cx.theme().colors().border.opacity(0.6)), + ) + .child(primary) + .into_any_element() + } else { + primary }; let needs_confirmation = if let AgentThreadEntry::ToolCall(tool_call) = entry { @@ -2516,7 +2638,7 @@ impl AcpThreadView { acp::ToolKind::Think => IconName::ToolThink, acp::ToolKind::Fetch => IconName::ToolWeb, acp::ToolKind::SwitchMode => IconName::ArrowRightLeft, - acp::ToolKind::Other => IconName::ToolHammer, + acp::ToolKind::Other | _ => IconName::ToolHammer, }) } .size(IconSize::Small) @@ -2768,7 +2890,7 @@ impl AcpThreadView { }) .gap_0p5() .children(options.iter().map(move |option| { - let option_id = SharedString::from(option.id.0.clone()); + let option_id = SharedString::from(option.option_id.0.clone()); Button::new((option_id, entry_ix), option.name.clone()) .map(|this| { let (this, action) = match option.kind { @@ -2784,7 +2906,7 @@ impl AcpThreadView { this.icon(IconName::Close).icon_color(Color::Error), Some(&RejectOnce as &dyn Action), ), - acp::PermissionOptionKind::RejectAlways => { + acp::PermissionOptionKind::RejectAlways | _ => { (this.icon(IconName::Close).icon_color(Color::Error), None) } }; @@ -2809,7 +2931,7 @@ impl AcpThreadView { .label_size(LabelSize::Small) .on_click(cx.listener({ let tool_call_id = tool_call_id.clone(); - let option_id = option.id.clone(); + let option_id = option.option_id.clone(); let option_kind = option.kind; move |this, _, window, cx| { this.authorize_tool_call( @@ -3140,7 +3262,7 @@ impl AcpThreadView { .text_ui_sm(cx) .h_full() .children(terminal_view.map(|terminal_view| { - if terminal_view + let element = if terminal_view .read(cx) .content_mode(window, cx) .is_scrollable() @@ -3148,7 +3270,15 @@ impl AcpThreadView { div().h_72().child(terminal_view).into_any_element() } else { terminal_view.into_any_element() - } + }; + + div() + .on_action(cx.listener(|_this, _: &NewTerminal, window, cx| { + window.dispatch_action(NewThread.boxed_clone(), cx); + cx.stop_propagation(); + })) + .child(element) + .into_any_element() })), ) }) @@ -3465,7 +3595,9 @@ impl AcpThreadView { (method.id.0.clone(), method.name.clone()) }; - Button::new(SharedString::from(method_id.clone()), name) + let agent_telemetry_id = connection.telemetry_id(); + + Button::new(method_id.clone(), name) .label_size(LabelSize::Small) .map(|this| { if ix == 0 { @@ -3484,12 +3616,12 @@ impl AcpThreadView { cx.listener(move |this, _, window, cx| { telemetry::event!( "Authenticate Agent Started", - agent = this.agent.telemetry_id(), + agent = agent_telemetry_id, method = method_id ); this.authenticate( - acp::AuthMethodId(method_id.clone()), + acp::AuthMethodId::new(method_id.clone()), window, cx, ) @@ -3754,48 +3886,64 @@ impl AcpThreadView { })) } - fn render_plan_entries(&self, plan: &Plan, window: &mut Window, cx: &Context) 
-> Div { - v_flex().children(plan.entries.iter().enumerate().flat_map(|(index, entry)| { - let element = h_flex() - .py_1() - .px_2() - .gap_2() - .justify_between() - .bg(cx.theme().colors().editor_background) - .when(index < plan.entries.len() - 1, |parent| { - parent.border_color(cx.theme().colors().border).border_b_1() - }) - .child( - h_flex() - .id(("plan_entry", index)) - .gap_1p5() - .max_w_full() - .overflow_x_scroll() - .text_xs() - .text_color(cx.theme().colors().text_muted) - .child(match entry.status { - acp::PlanEntryStatus::Pending => Icon::new(IconName::TodoPending) - .size(IconSize::Small) - .color(Color::Muted) - .into_any_element(), - acp::PlanEntryStatus::InProgress => Icon::new(IconName::TodoProgress) - .size(IconSize::Small) - .color(Color::Accent) - .with_rotate_animation(2) - .into_any_element(), - acp::PlanEntryStatus::Completed => Icon::new(IconName::TodoComplete) - .size(IconSize::Small) - .color(Color::Success) - .into_any_element(), - }) - .child(MarkdownElement::new( - entry.content.clone(), - plan_label_markdown_style(&entry.status, window, cx), - )), - ); + fn render_plan_entries( + &self, + plan: &Plan, + window: &mut Window, + cx: &Context, + ) -> impl IntoElement { + v_flex() + .id("plan_items_list") + .max_h_40() + .overflow_y_scroll() + .children(plan.entries.iter().enumerate().flat_map(|(index, entry)| { + let element = h_flex() + .py_1() + .px_2() + .gap_2() + .justify_between() + .bg(cx.theme().colors().editor_background) + .when(index < plan.entries.len() - 1, |parent| { + parent.border_color(cx.theme().colors().border).border_b_1() + }) + .child( + h_flex() + .id(("plan_entry", index)) + .gap_1p5() + .max_w_full() + .overflow_x_scroll() + .text_xs() + .text_color(cx.theme().colors().text_muted) + .child(match entry.status { + acp::PlanEntryStatus::InProgress => { + Icon::new(IconName::TodoProgress) + .size(IconSize::Small) + .color(Color::Accent) + .with_rotate_animation(2) + .into_any_element() + } + acp::PlanEntryStatus::Completed => { + Icon::new(IconName::TodoComplete) + .size(IconSize::Small) + .color(Color::Success) + .into_any_element() + } + acp::PlanEntryStatus::Pending | _ => { + Icon::new(IconName::TodoPending) + .size(IconSize::Small) + .color(Color::Muted) + .into_any_element() + } + }) + .child(MarkdownElement::new( + entry.content.clone(), + plan_label_markdown_style(&entry.status, window, cx), + )), + ); - Some(element) - })) + Some(element) + })) + .into_any_element() } fn render_edits_summary( @@ -3933,162 +4081,185 @@ impl AcpThreadView { changed_buffers: &BTreeMap, Entity>, pending_edits: bool, cx: &Context, - ) -> Div { + ) -> impl IntoElement { let editor_bg_color = cx.theme().colors().editor_background; - v_flex().children(changed_buffers.iter().enumerate().flat_map( - |(index, (buffer, _diff))| { - let file = buffer.read(cx).file()?; - let path = file.path(); - let path_style = file.path_style(cx); - let separator = file.path_style(cx).separator(); + v_flex() + .id("edited_files_list") + .max_h_40() + .overflow_y_scroll() + .children( + changed_buffers + .iter() + .enumerate() + .flat_map(|(index, (buffer, _diff))| { + let file = buffer.read(cx).file()?; + let path = file.path(); + let path_style = file.path_style(cx); + let separator = file.path_style(cx).primary_separator(); + + let file_path = path.parent().and_then(|parent| { + if parent.is_empty() { + None + } else { + Some( + Label::new(format!( + "{}{separator}", + parent.display(path_style) + )) + .color(Color::Muted) + .size(LabelSize::XSmall) + .buffer_font(cx), + 
) + } + }); - let file_path = path.parent().and_then(|parent| { - if parent.is_empty() { - None - } else { - Some( - Label::new(format!("{}{separator}", parent.display(path_style))) - .color(Color::Muted) + let file_name = path.file_name().map(|name| { + Label::new(name.to_string()) .size(LabelSize::XSmall) - .buffer_font(cx), - ) - } - }); + .buffer_font(cx) + .ml_1p5() + }); - let file_name = path.file_name().map(|name| { - Label::new(name.to_string()) - .size(LabelSize::XSmall) - .buffer_font(cx) - .ml_1p5() - }); + let full_path = path.display(path_style).to_string(); - let file_icon = FileIcons::get_icon(path.as_std_path(), cx) - .map(Icon::from_path) - .map(|icon| icon.color(Color::Muted).size(IconSize::Small)) - .unwrap_or_else(|| { - Icon::new(IconName::File) - .color(Color::Muted) - .size(IconSize::Small) - }); + let file_icon = FileIcons::get_icon(path.as_std_path(), cx) + .map(Icon::from_path) + .map(|icon| icon.color(Color::Muted).size(IconSize::Small)) + .unwrap_or_else(|| { + Icon::new(IconName::File) + .color(Color::Muted) + .size(IconSize::Small) + }); - let overlay_gradient = linear_gradient( - 90., - linear_color_stop(editor_bg_color, 1.), - linear_color_stop(editor_bg_color.opacity(0.2), 0.), - ); + let overlay_gradient = linear_gradient( + 90., + linear_color_stop(editor_bg_color, 1.), + linear_color_stop(editor_bg_color.opacity(0.2), 0.), + ); - let element = h_flex() - .group("edited-code") - .id(("file-container", index)) - .py_1() - .pl_2() - .pr_1() - .gap_2() - .justify_between() - .bg(editor_bg_color) - .when(index < changed_buffers.len() - 1, |parent| { - parent.border_color(cx.theme().colors().border).border_b_1() - }) - .child( - h_flex() - .id(("file-name-row", index)) - .relative() - .pr_8() - .w_full() - .overflow_x_scroll() + let element = h_flex() + .group("edited-code") + .id(("file-container", index)) + .py_1() + .pl_2() + .pr_1() + .gap_2() + .justify_between() + .bg(editor_bg_color) + .when(index < changed_buffers.len() - 1, |parent| { + parent.border_color(cx.theme().colors().border).border_b_1() + }) .child( h_flex() - .id(("file-name-path", index)) - .cursor_pointer() - .pr_0p5() - .gap_0p5() - .hover(|s| s.bg(cx.theme().colors().element_hover)) - .rounded_xs() - .child(file_icon) - .children(file_name) - .children(file_path) - .tooltip(Tooltip::text("Go to File")) - .on_click({ - let buffer = buffer.clone(); - cx.listener(move |this, _, window, cx| { - this.open_edited_buffer(&buffer, window, cx); - }) - }), - ) - .child( - div() - .absolute() - .h_full() - .w_12() - .top_0() - .bottom_0() - .right_0() - .bg(overlay_gradient), - ), - ) - .child( - h_flex() - .gap_1() - .visible_on_hover("edited-code") - .child( - Button::new("review", "Review") - .label_size(LabelSize::Small) - .on_click({ - let buffer = buffer.clone(); - cx.listener(move |this, _, window, cx| { - this.open_edited_buffer(&buffer, window, cx); - }) - }), + .id(("file-name-row", index)) + .relative() + .pr_8() + .w_full() + .child( + h_flex() + .id(("file-name-path", index)) + .cursor_pointer() + .pr_0p5() + .gap_0p5() + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .rounded_xs() + .child(file_icon) + .children(file_name) + .children(file_path) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Go to File", + None, + full_path.clone(), + cx, + ) + }) + .on_click({ + let buffer = buffer.clone(); + cx.listener(move |this, _, window, cx| { + this.open_edited_buffer(&buffer, window, cx); + }) + }), + ) + .child( + div() + .absolute() + .h_full() + .w_12() + .top_0() + 
.bottom_0() + .right_0() + .bg(overlay_gradient), + ), ) - .child(Divider::vertical().color(DividerColor::BorderVariant)) .child( - Button::new("reject-file", "Reject") - .label_size(LabelSize::Small) - .disabled(pending_edits) - .on_click({ - let buffer = buffer.clone(); - let action_log = action_log.clone(); - let telemetry = telemetry.clone(); - move |_, _, cx| { - action_log.update(cx, |action_log, cx| { - action_log + h_flex() + .gap_1() + .visible_on_hover("edited-code") + .child( + Button::new("review", "Review") + .label_size(LabelSize::Small) + .on_click({ + let buffer = buffer.clone(); + cx.listener(move |this, _, window, cx| { + this.open_edited_buffer(&buffer, window, cx); + }) + }), + ) + .child(Divider::vertical().color(DividerColor::BorderVariant)) + .child( + Button::new("reject-file", "Reject") + .label_size(LabelSize::Small) + .disabled(pending_edits) + .on_click({ + let buffer = buffer.clone(); + let action_log = action_log.clone(); + let telemetry = telemetry.clone(); + move |_, _, cx| { + action_log.update(cx, |action_log, cx| { + action_log .reject_edits_in_ranges( buffer.clone(), - vec![Anchor::MIN..Anchor::MAX], + vec![Anchor::min_max_range_for_buffer( + buffer.read(cx).remote_id(), + )], Some(telemetry.clone()), cx, ) .detach_and_log_err(cx); - }) - } - }), - ) - .child( - Button::new("keep-file", "Keep") - .label_size(LabelSize::Small) - .disabled(pending_edits) - .on_click({ - let buffer = buffer.clone(); - let action_log = action_log.clone(); - let telemetry = telemetry.clone(); - move |_, _, cx| { - action_log.update(cx, |action_log, cx| { - action_log.keep_edits_in_range( - buffer.clone(), - Anchor::MIN..Anchor::MAX, - Some(telemetry.clone()), - cx, - ); - }) - } - }), - ), - ); + }) + } + }), + ) + .child( + Button::new("keep-file", "Keep") + .label_size(LabelSize::Small) + .disabled(pending_edits) + .on_click({ + let buffer = buffer.clone(); + let action_log = action_log.clone(); + let telemetry = telemetry.clone(); + move |_, _, cx| { + action_log.update(cx, |action_log, cx| { + action_log.keep_edits_in_range( + buffer.clone(), + Anchor::min_max_range_for_buffer( + buffer.read(cx).remote_id(), + ), + Some(telemetry.clone()), + cx, + ); + }) + } + }), + ), + ); - Some(element) - }, - )) + Some(element) + }), + ) + .into_any_element() } fn render_message_editor(&mut self, window: &mut Window, cx: &mut Context) -> AnyElement { @@ -4109,8 +4280,10 @@ impl AcpThreadView { .block_mouse_except_scroll(); let enable_editor = match self.thread_state { - ThreadState::Loading { .. } | ThreadState::Ready { .. } => true, - ThreadState::Unauthenticated { .. } | ThreadState::LoadError(..) => false, + ThreadState::Ready { .. } => true, + ThreadState::Loading { .. } + | ThreadState::Unauthenticated { .. } + | ThreadState::LoadError(..) 
=> false, }; v_flex() @@ -4123,7 +4296,11 @@ impl AcpThreadView { } })) .on_action(cx.listener(|this, _: &CycleModeSelector, window, cx| { - if let Some(mode_selector) = this.mode_selector() { + if let Some(profile_selector) = this.profile_selector.as_ref() { + profile_selector.update(cx, |profile_selector, cx| { + profile_selector.cycle_profile(cx); + }); + } else if let Some(mode_selector) = this.mode_selector() { mode_selector.update(cx, |mode_selector, cx| { mode_selector.cycle_mode(window, cx); }); @@ -4135,6 +4312,13 @@ impl AcpThreadView { .update(cx, |model_selector, cx| model_selector.toggle(window, cx)); } })) + .on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| { + if let Some(model_selector) = this.model_selector.as_ref() { + model_selector.update(cx, |model_selector, cx| { + model_selector.cycle_favorite_models(window, cx); + }); + } + })) .p_2() .gap_2() .border_t_1() @@ -4188,6 +4372,8 @@ impl AcpThreadView { .justify_between() .child( h_flex() + .gap_0p5() + .child(self.render_add_context_button(cx)) .child(self.render_follow_toggle(cx)) .children(self.render_burn_mode_toggle(cx)), ) @@ -4340,7 +4526,7 @@ impl AcpThreadView { self.authorize_tool_call( tool_call.id.clone(), - option.id.clone(), + option.option_id.clone(), option.kind, window, cx, @@ -4502,6 +4688,29 @@ impl AcpThreadView { })) } + fn render_add_context_button(&self, cx: &mut Context) -> impl IntoElement { + let message_editor = self.message_editor.clone(); + let menu_visible = message_editor.read(cx).is_completions_menu_visible(cx); + + IconButton::new("add-context", IconName::AtSign) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .when(!menu_visible, |this| { + this.tooltip(move |_window, cx| { + Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx) + }) + }) + .on_click(cx.listener(move |_this, _, window, cx| { + let message_editor_clone = message_editor.clone(); + + window.defer(cx, move |window, cx| { + message_editor_clone.update(cx, |message_editor, cx| { + message_editor.trigger_completion_menu(window, cx); + }); + }); + })) + } + fn render_markdown(&self, markdown: Entity, style: MarkdownStyle) -> MarkdownElement { let workspace = self.workspace.clone(); MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| { @@ -4670,11 +4879,8 @@ impl AcpThreadView { let buffer = multibuffer.as_singleton(); if agent_location.buffer.upgrade() == buffer { let excerpt_id = multibuffer.excerpt_ids().first().cloned(); - let anchor = editor::Anchor::in_buffer( - excerpt_id.unwrap(), - buffer.unwrap().read(cx).remote_id(), - agent_location.position, - ); + let anchor = + editor::Anchor::in_buffer(excerpt_id.unwrap(), agent_location.position); editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([anchor..anchor]); }) @@ -4723,7 +4929,7 @@ impl AcpThreadView { buffer.update(cx, |buffer, cx| { buffer.set_text(markdown, cx); buffer.set_language(Some(markdown_language), cx); - buffer.set_capability(language::Capability::ReadOnly, cx); + buffer.set_capability(language::Capability::ReadWrite, cx); })?; workspace.update_in(cx, |workspace, window, cx| { @@ -4752,6 +4958,32 @@ impl AcpThreadView { cx.notify(); } + fn scroll_to_most_recent_user_prompt(&mut self, cx: &mut Context) { + let Some(thread) = self.thread() else { + return; + }; + + let entries = thread.read(cx).entries(); + if entries.is_empty() { + return; + } + + // Find the most recent user message and scroll it to the top of the viewport. 
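+        // `rposition` searches the entries from the back, so the match is always the latest user message.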
+ // (Fallback: if no user message exists, scroll to the bottom.) + if let Some(ix) = entries + .iter() + .rposition(|entry| matches!(entry, AgentThreadEntry::UserMessage(_))) + { + self.list_state.scroll_to(ListOffset { + item_ix: ix, + offset_in_item: px(0.0), + }); + cx.notify(); + } else { + self.scroll_to_bottom(cx); + } + } + pub fn scroll_to_bottom(&mut self, cx: &mut Context) { if let Some(thread) = self.thread() { let entry_count = thread.read(cx).entries().len(); @@ -4847,8 +5079,8 @@ impl AcpThreadView { }); if let Some(screen_window) = cx - .open_window(options, |_, cx| { - cx.new(|_| { + .open_window(options, |_window, cx| { + cx.new(|_cx| { AgentNotification::new(title.clone(), caption.clone(), icon, project_name) }) }) @@ -4970,6 +5202,16 @@ impl AcpThreadView { } })); + let scroll_to_recent_user_prompt = + IconButton::new("scroll_to_recent_user_prompt", IconName::ForwardArrow) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::Small) + .icon_color(Color::Ignored) + .tooltip(Tooltip::text("Scroll To Most Recent User Prompt")) + .on_click(cx.listener(move |this, _, _, cx| { + this.scroll_to_most_recent_user_prompt(cx); + })); + let scroll_to_top = IconButton::new("scroll_to_top", IconName::ArrowUp) .shape(ui::IconButtonShape::Square) .icon_size(IconSize::Small) @@ -4980,15 +5222,12 @@ impl AcpThreadView { })); let mut container = h_flex() - .id("thread-controls-container") - .group("thread-controls-container") .w_full() .py_2() .px_5() .gap_px() .opacity(0.6) - .hover(|style| style.opacity(1.)) - .flex_wrap() + .hover(|s| s.opacity(1.)) .justify_end(); if AgentSettings::get_global(cx).enable_feedback @@ -4998,23 +5237,13 @@ impl AcpThreadView { { let feedback = self.thread_feedback.feedback; - container = container - .child( - div().visible_on_hover("thread-controls-container").child( - Label::new(match feedback { - Some(ThreadFeedback::Positive) => "Thanks for your feedback!", - Some(ThreadFeedback::Negative) => { - "We appreciate your feedback and will use it to improve." - } - None => { - "Rating the thread sends all of your current conversation to the Zed team." 
- } - }) - .color(Color::Muted) - .size(LabelSize::XSmall) - .truncate(), - ), + let tooltip_meta = || { + SharedString::new( + "Rating the thread sends all of your current conversation to the Zed team.", ) + }; + + container = container .child( IconButton::new("feedback-thumbs-up", IconName::ThumbsUp) .shape(ui::IconButtonShape::Square) @@ -5023,7 +5252,12 @@ impl AcpThreadView { Some(ThreadFeedback::Positive) => Color::Accent, _ => Color::Ignored, }) - .tooltip(Tooltip::text("Helpful Response")) + .tooltip(move |window, cx| match feedback { + Some(ThreadFeedback::Positive) => { + Tooltip::text("Thanks for your feedback!")(window, cx) + } + _ => Tooltip::with_meta("Helpful Response", None, tooltip_meta(), cx), + }) .on_click(cx.listener(move |this, _, window, cx| { this.handle_feedback_click(ThreadFeedback::Positive, window, cx); })), @@ -5036,7 +5270,16 @@ impl AcpThreadView { Some(ThreadFeedback::Negative) => Color::Accent, _ => Color::Ignored, }) - .tooltip(Tooltip::text("Not Helpful")) + .tooltip(move |window, cx| match feedback { + Some(ThreadFeedback::Negative) => { + Tooltip::text( + "We appreciate your feedback and will use it to improve in the future.", + )(window, cx) + } + _ => { + Tooltip::with_meta("Not Helpful Response", None, tooltip_meta(), cx) + } + }) .on_click(cx.listener(move |this, _, window, cx| { this.handle_feedback_click(ThreadFeedback::Negative, window, cx); })), @@ -5045,6 +5288,7 @@ impl AcpThreadView { container .child(open_as_markdown) + .child(scroll_to_recent_user_prompt) .child(scroll_to_top) .into_any_element() } @@ -5276,52 +5520,44 @@ impl AcpThreadView { ) } - fn render_codex_windows_warning(&self, cx: &mut Context) -> Option { - if self.show_codex_windows_warning { - Some( - Callout::new() - .icon(IconName::Warning) - .severity(Severity::Warning) - .title("Codex on Windows") - .description( - "For best performance, run Codex in Windows Subsystem for Linux (WSL2)", - ) - .actions_slot( - Button::new("open-wsl-modal", "Open in WSL") - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .on_click(cx.listener({ - move |_, _, _window, cx| { - #[cfg(windows)] - _window.dispatch_action( - zed_actions::wsl_actions::OpenWsl::default().boxed_clone(), - cx, - ); - cx.notify(); - } - })), - ) - .dismiss_action( - IconButton::new("dismiss", IconName::Close) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("Dismiss Warning")) - .on_click(cx.listener({ - move |this, _, _, cx| { - this.show_codex_windows_warning = false; - cx.notify(); - } - })), - ), + fn render_codex_windows_warning(&self, cx: &mut Context) -> Callout { + Callout::new() + .icon(IconName::Warning) + .severity(Severity::Warning) + .title("Codex on Windows") + .description("For best performance, run Codex in Windows Subsystem for Linux (WSL2)") + .actions_slot( + Button::new("open-wsl-modal", "Open in WSL") + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .on_click(cx.listener({ + move |_, _, _window, cx| { + #[cfg(windows)] + _window.dispatch_action( + zed_actions::wsl_actions::OpenWsl::default().boxed_clone(), + cx, + ); + cx.notify(); + } + })), + ) + .dismiss_action( + IconButton::new("dismiss", IconName::Close) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Dismiss Warning")) + .on_click(cx.listener({ + move |this, _, _, cx| { + this.show_codex_windows_warning = false; + cx.notify(); + } + })), ) - } else { - None - } } - fn render_thread_error(&self, cx: &mut Context) -> Option
{ + fn render_thread_error(&mut self, window: &mut Window, cx: &mut Context) -> Option
{ let content = match self.thread_error.as_ref()? { - ThreadError::Other(error) => self.render_any_thread_error(error.clone(), cx), + ThreadError::Other(error) => self.render_any_thread_error(error.clone(), window, cx), ThreadError::Refusal => self.render_refusal_error(cx), ThreadError::AuthenticationRequired(error) => { self.render_authentication_required_error(error.clone(), cx) @@ -5368,6 +5604,16 @@ impl AcpThreadView { ) } + fn current_mode_id(&self, cx: &App) -> Option> { + if let Some(thread) = self.as_native_thread(cx) { + Some(thread.read(cx).profile().0.clone()) + } else if let Some(mode_selector) = self.mode_selector() { + Some(mode_selector.read(cx).mode().0) + } else { + None + } + } + fn current_model_id(&self, cx: &App) -> Option { self.model_selector .as_ref() @@ -5406,7 +5652,12 @@ impl AcpThreadView { .dismiss_action(self.dismiss_error_button(cx)) } - fn render_any_thread_error(&self, error: SharedString, cx: &mut Context<'_, Self>) -> Callout { + fn render_any_thread_error( + &mut self, + error: SharedString, + window: &mut Window, + cx: &mut Context<'_, Self>, + ) -> Callout { let can_resume = self .thread() .map_or(false, |thread| thread.read(cx).can_resume(cx)); @@ -5419,11 +5670,24 @@ impl AcpThreadView { supports_burn_mode && thread.completion_mode() == CompletionMode::Normal }); + let markdown = if let Some(markdown) = &self.thread_error_markdown { + markdown.clone() + } else { + let markdown = cx.new(|cx| Markdown::new(error.clone(), None, None, cx)); + self.thread_error_markdown = Some(markdown.clone()); + markdown + }; + + let markdown_style = default_markdown_style(false, true, window, cx); + let description = self + .render_markdown(markdown, markdown_style) + .into_any_element(); + Callout::new() .severity(Severity::Error) - .title("Error") .icon(IconName::XCircle) - .description(error.clone()) + .title("An Error Happened") + .description_slot(description) .actions_slot( h_flex() .gap_0p5() @@ -5442,11 +5706,9 @@ impl AcpThreadView { }) .when(can_resume, |this| { this.child( - Button::new("retry", "Retry") - .icon(IconName::RotateCw) - .icon_position(IconPosition::Start) + IconButton::new("retry", IconName::RotateCw) .icon_size(IconSize::Small) - .label_size(LabelSize::Small) + .tooltip(Tooltip::text("Retry Generation")) .on_click(cx.listener(|this, _, _window, cx| { this.resume_chat(cx); })), @@ -5588,7 +5850,6 @@ impl AcpThreadView { IconButton::new("copy", IconName::Copy) .icon_size(IconSize::Small) - .icon_color(Color::Muted) .tooltip(Tooltip::text("Copy Error Message")) .on_click(move |_, _, cx| { cx.write_to_clipboard(ClipboardItem::new_string(message.clone())) @@ -5598,7 +5859,6 @@ impl AcpThreadView { fn dismiss_error_button(&self, cx: &mut Context) -> impl IntoElement { IconButton::new("dismiss", IconName::Close) .icon_size(IconSize::Small) - .icon_color(Color::Muted) .tooltip(Tooltip::text("Dismiss Error")) .on_click(cx.listener({ move |this, _, _, cx| { @@ -5625,6 +5885,11 @@ impl AcpThreadView { provider_id: None, }; this.clear_thread_error(cx); + if let Some(message) = this.in_flight_prompt.take() { + this.message_editor.update(cx, |editor, cx| { + editor.set_message(message, window, cx); + }); + } let this = cx.weak_entity(); window.defer(cx, |window, cx| { Self::handle_auth_required(this, err, agent, connection, window, cx); @@ -5719,12 +5984,10 @@ fn placeholder_text(agent_name: &str, has_commands: bool) -> String { impl Focusable for AcpThreadView { fn focus_handle(&self, cx: &App) -> FocusHandle { match self.thread_state { - 
ThreadState::Loading { .. } | ThreadState::Ready { .. } => { - self.active_editor(cx).focus_handle(cx) - } - ThreadState::LoadError(_) | ThreadState::Unauthenticated { .. } => { - self.focus_handle.clone() - } + ThreadState::Ready { .. } => self.active_editor(cx).focus_handle(cx), + ThreadState::Loading { .. } + | ThreadState::LoadError(_) + | ThreadState::Unauthenticated { .. } => self.focus_handle.clone(), } } } @@ -5792,7 +6055,7 @@ impl Render for AcpThreadView { .flex_grow() .into_any(), ) - .vertical_scrollbar_for(self.list_state.clone(), window, cx) + .vertical_scrollbar_for(&self.list_state, window, cx) .into_any() } else { this.child(self.render_recent_history(cx)).into_any() @@ -5809,14 +6072,10 @@ impl Render for AcpThreadView { _ => this, }) .children(self.render_thread_retry_status_callout(window, cx)) - .children({ - if cfg!(windows) && self.project.read(cx).is_local() { - self.render_codex_windows_warning(cx) - } else { - None - } + .when(self.show_codex_windows_warning, |this| { + this.child(self.render_codex_windows_warning(cx)) }) - .children(self.render_thread_error(cx)) + .children(self.render_thread_error(window, cx)) .when_some( self.new_server_version_available.as_ref().filter(|_| { !has_messages || !matches!(self.thread_state, ThreadState::Ready { .. }) @@ -5882,7 +6141,6 @@ fn default_markdown_style( syntax: cx.theme().syntax().clone(), selection_background_color: colors.element_selection_background, code_block_overflow_x_scroll: true, - table_overflow_x_scroll: true, heading_level_styles: Some(HeadingLevelStyles { h1: Some(TextStyleRefinement { font_size: Some(rems(1.15).into()), @@ -5931,13 +6189,13 @@ fn default_markdown_style( }, border_color: Some(colors.border_variant), background: Some(colors.editor_background.into()), - text: Some(TextStyleRefinement { + text: TextStyleRefinement { font_family: Some(theme_settings.buffer_font.family.clone()), font_fallbacks: theme_settings.buffer_font.fallbacks.clone(), font_features: Some(theme_settings.buffer_font.features.clone()), font_size: Some(buffer_font_size.into()), ..Default::default() - }), + }, ..Default::default() }, inline_code: TextStyleRefinement { @@ -5950,6 +6208,7 @@ fn default_markdown_style( }, link: TextStyleRefinement { background_color: Some(colors.editor_foreground.opacity(0.025)), + color: Some(colors.text_accent), underline: Some(UnderlineStyle { color: Some(colors.text_accent.opacity(0.5)), thickness: px(1.), @@ -6002,8 +6261,9 @@ pub(crate) mod tests { use acp_thread::StubAgentConnection; use agent_client_protocol::SessionId; use assistant_text_thread::TextThreadStore; + use editor::MultiBufferOffset; use fs::FakeFs; - use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext}; + use gpui::{EventEmitter, TestAppContext, VisualTestContext}; use project::Project; use serde_json::json; use settings::SettingsStore; @@ -6107,27 +6367,18 @@ pub(crate) mod tests { async fn test_notification_for_tool_authorization(cx: &mut TestAppContext) { init_test(cx); - let tool_call_id = acp::ToolCallId("1".into()); - let tool_call = acp::ToolCall { - id: tool_call_id.clone(), - title: "Label".into(), - kind: acp::ToolKind::Edit, - status: acp::ToolCallStatus::Pending, - content: vec!["hi".into()], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - }; + let tool_call_id = acp::ToolCallId::new("1"); + let tool_call = acp::ToolCall::new(tool_call_id.clone(), "Label") + .kind(acp::ToolKind::Edit) + .content(vec!["hi".into()]); let connection = 
StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( tool_call_id, - vec![acp::PermissionOption { - id: acp::PermissionOptionId("1".into()), - name: "Allow".into(), - kind: acp::PermissionOptionKind::AllowOnce, - meta: None, - }], + vec![acp::PermissionOption::new( + "1", + "Allow", + acp::PermissionOptionKind::AllowOnce, + )], )])); connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); @@ -6255,6 +6506,57 @@ pub(crate) mod tests { ); } + #[gpui::test] + async fn test_notification_closed_when_thread_view_dropped(cx: &mut TestAppContext) { + init_test(cx); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; + + let weak_view = thread_view.downgrade(); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + + cx.deactivate_window(); + + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + // Verify notification is shown + assert!( + cx.windows() + .iter() + .any(|window| window.downcast::().is_some()), + "Expected notification to be shown" + ); + + // Drop the thread view (simulating navigation to a new thread) + drop(thread_view); + drop(message_editor); + // Trigger an update to flush effects, which will call release_dropped_entities + cx.update(|_window, _cx| {}); + cx.run_until_parked(); + + // Verify the entity was actually released + assert!( + !weak_view.is_upgradable(), + "Thread view entity should be released after dropping" + ); + + // The notification should be automatically closed via on_release + assert!( + !cx.windows() + .iter() + .any(|window| window.downcast::().is_some()), + "Notification should be closed when thread view is dropped" + ); + } + async fn setup_thread_view( agent: impl AgentServer + 'static, cx: &mut TestAppContext, @@ -6279,6 +6581,7 @@ pub(crate) mod tests { project, history_store, None, + false, window, cx, ) @@ -6346,10 +6649,7 @@ pub(crate) mod tests { fn default_response() -> Self { let conn = StubAgentConnection::new(); conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk { - content: "Default response".into(), - meta: None, - }, + acp::ContentChunk::new("Default response".into()), )]); Self::new(conn) } @@ -6359,10 +6659,6 @@ pub(crate) mod tests { where C: 'static + AgentConnection + Send + Clone, { - fn telemetry_id(&self) -> &'static str { - "test" - } - fn logo(&self) -> ui::IconName { ui::IconName::Ai } @@ -6389,8 +6685,8 @@ pub(crate) mod tests { struct SaboteurAgentConnection; impl AgentConnection for SaboteurAgentConnection { - fn telemetry_id(&self) -> &'static str { - "saboteur" + fn telemetry_id(&self) -> SharedString { + "saboteur".into() } fn new_thread( @@ -6406,13 +6702,13 @@ pub(crate) mod tests { self, project, action_log, - SessionId("test".into()), - watch::Receiver::constant(acp::PromptCapabilities { - image: true, - audio: true, - embedded_context: true, - meta: None, - }), + SessionId::new("test"), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), cx, ) }))) @@ -6453,8 +6749,8 @@ pub(crate) mod tests { struct RefusalAgentConnection; impl AgentConnection for RefusalAgentConnection { - fn telemetry_id(&self) -> &'static str { - "refusal" + fn telemetry_id(&self) -> SharedString { + "refusal".into() } fn new_thread( @@ -6470,13 
+6766,13 @@ pub(crate) mod tests { self, project, action_log, - SessionId("test".into()), - watch::Receiver::constant(acp::PromptCapabilities { - image: true, - audio: true, - embedded_context: true, - meta: None, - }), + SessionId::new("test"), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), cx, ) }))) @@ -6500,10 +6796,7 @@ pub(crate) mod tests { _params: acp::PromptRequest, _cx: &mut App, ) -> Task> { - Task::ready(Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Refusal, - meta: None, - })) + Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::Refusal))) } fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) { @@ -6520,7 +6813,7 @@ pub(crate) mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); prompt_store::init(cx) }); } @@ -6558,6 +6851,7 @@ pub(crate) mod tests { project.clone(), history_store.clone(), None, + false, window, cx, ) @@ -6571,24 +6865,14 @@ pub(crate) mod tests { .unwrap(); // First user message - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall { - id: acp::ToolCallId("tool1".into()), - title: "Edit file 1".into(), - kind: acp::ToolKind::Edit, - status: acp::ToolCallStatus::Completed, - content: vec![acp::ToolCallContent::Diff { - diff: acp::Diff { - path: "/project/test1.txt".into(), - old_text: Some("old content 1".into()), - new_text: "new content 1".into(), - meta: None, - }, - }], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - })]); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall( + acp::ToolCall::new("tool1", "Edit file 1") + .kind(acp::ToolKind::Edit) + .status(acp::ToolCallStatus::Completed) + .content(vec![acp::ToolCallContent::Diff( + acp::Diff::new("/project/test1.txt", "new content 1").old_text("old content 1"), + )]), + )]); thread .update(cx, |thread, cx| thread.send_raw("Give me a diff", cx)) @@ -6614,24 +6898,14 @@ pub(crate) mod tests { }); // Second user message - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall { - id: acp::ToolCallId("tool2".into()), - title: "Edit file 2".into(), - kind: acp::ToolKind::Edit, - status: acp::ToolCallStatus::Completed, - content: vec![acp::ToolCallContent::Diff { - diff: acp::Diff { - path: "/project/test2.txt".into(), - old_text: Some("old content 2".into()), - new_text: "new content 2".into(), - meta: None, - }, - }], - locations: vec![], - raw_input: None, - raw_output: None, - meta: None, - })]); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall( + acp::ToolCall::new("tool2", "Edit file 2") + .kind(acp::ToolKind::Edit) + .status(acp::ToolCallStatus::Completed) + .content(vec![acp::ToolCallContent::Diff( + acp::Diff::new("/project/test2.txt", "new content 2").old_text("old content 2"), + )]), + )]); thread .update(cx, |thread, cx| thread.send_raw("Another one", cx)) @@ -6698,6 +6972,70 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_scroll_to_most_recent_user_prompt(cx: &mut TestAppContext) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + // Each user prompt will result in a user message entry plus an agent message entry. 
+        connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
+            acp::ContentChunk::new("Response 1".into()),
+        )]);
+
+        let (thread_view, cx) =
+            setup_thread_view(StubAgentServer::new(connection.clone()), cx).await;
+
+        let thread = thread_view
+            .read_with(cx, |view, _| view.thread().cloned())
+            .unwrap();
+
+        thread
+            .update(cx, |thread, cx| thread.send_raw("Prompt 1", cx))
+            .await
+            .unwrap();
+        cx.run_until_parked();
+
+        connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
+            acp::ContentChunk::new("Response 2".into()),
+        )]);
+
+        thread
+            .update(cx, |thread, cx| thread.send_raw("Prompt 2", cx))
+            .await
+            .unwrap();
+        cx.run_until_parked();
+
+        // Move somewhere else first so we're not trivially already on the last user prompt.
+        thread_view.update(cx, |view, cx| {
+            view.scroll_to_top(cx);
+        });
+        cx.run_until_parked();
+
+        thread_view.update(cx, |view, cx| {
+            view.scroll_to_most_recent_user_prompt(cx);
+            let scroll_top = view.list_state.logical_scroll_top();
+            // Entries layout is: [User1, Assistant1, User2, Assistant2]
+            assert_eq!(scroll_top.item_ix, 2);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_scroll_to_most_recent_user_prompt_falls_back_to_bottom_without_user_messages(
+        cx: &mut TestAppContext,
+    ) {
+        init_test(cx);
+
+        let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
+
+        // With no entries, scrolling should be a no-op and must not panic.
+        thread_view.update(cx, |view, cx| {
+            view.scroll_to_most_recent_user_prompt(cx);
+            let scroll_top = view.list_state.logical_scroll_top();
+            assert_eq!(scroll_top.item_ix, 0);
+        });
+    }
+
     #[gpui::test]
     async fn test_message_editing_cancel(cx: &mut TestAppContext) {
         init_test(cx);
@@ -6705,14 +7043,7 @@ pub(crate) mod tests {
         let connection = StubAgentConnection::new();

         connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
-            acp::ContentChunk {
-                content: acp::ContentBlock::Text(acp::TextContent {
-                    text: "Response".into(),
-                    annotations: None,
-                    meta: None,
-                }),
-                meta: None,
-            },
+            acp::ContentChunk::new("Response".into()),
         )]);

         let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
@@ -6798,14 +7129,7 @@ pub(crate) mod tests {
         let connection = StubAgentConnection::new();

         connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
-            acp::ContentChunk {
-                content: acp::ContentBlock::Text(acp::TextContent {
-                    text: "Response".into(),
-                    annotations: None,
-                    meta: None,
-                }),
-                meta: None,
-            },
+            acp::ContentChunk::new("Response".into()),
         )]);

         let (thread_view, cx) =
@@ -6845,14 +7169,7 @@ pub(crate) mod tests {

         // Send
         connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
-            acp::ContentChunk {
-                content: acp::ContentBlock::Text(acp::TextContent {
-                    text: "New Response".into(),
-                    annotations: None,
-                    meta: None,
-                }),
-                meta: None,
-            },
+            acp::ContentChunk::new("New Response".into()),
         )]);

         user_message_editor.update_in(cx, |_editor, window, cx| {
@@ -6940,14 +7257,7 @@ pub(crate) mod tests {
         cx.update(|_, cx| {
             connection.send_update(
                 session_id.clone(),
-                acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
-                    content: acp::ContentBlock::Text(acp::TextContent {
-                        text: "Response".into(),
-                        annotations: None,
-                        meta: None,
-                    }),
-                    meta: None,
-                }),
+                acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("Response".into())),
                 cx,
             );
             connection.end_turn(session_id, acp::StopReason::EndTurn);
@@ -6999,10 +7309,9 @@ pub(crate) mod tests {
cx.update(|_, cx| { connection.send_update( session_id.clone(), - acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { - content: "Message 1 resp".into(), - meta: None, - }), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new( + "Message 1 resp".into(), + )), cx, ); }); @@ -7036,10 +7345,7 @@ pub(crate) mod tests { // Simulate a response sent after beginning to cancel connection.send_update( session_id.clone(), - acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { - content: "onse".into(), - meta: None, - }), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("onse".into())), cx, ); }); @@ -7070,10 +7376,9 @@ pub(crate) mod tests { cx.update(|_, cx| { connection.send_update( session_id.clone(), - acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { - content: "Message 2 response".into(), - meta: None, - }), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new( + "Message 2 response".into(), + )), cx, ); connection.end_turn(session_id.clone(), acp::StopReason::EndTurn); @@ -7112,14 +7417,7 @@ pub(crate) mod tests { let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk { - content: acp::ContentBlock::Text(acp::TextContent { - text: "Response".into(), - annotations: None, - meta: None, - }), - meta: None, - }, + acp::ContentChunk::new("Response".into()), )]); let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; @@ -7170,7 +7468,7 @@ pub(crate) mod tests { Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges([8..15]); + selections.select_ranges([MultiBufferOffset(8)..MultiBufferOffset(15)]); }); editor @@ -7198,14 +7496,7 @@ pub(crate) mod tests { let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( - acp::ContentChunk { - content: acp::ContentBlock::Text(acp::TextContent { - text: "Response".into(), - annotations: None, - meta: None, - }), - meta: None, - }, + acp::ContentChunk::new("Response".into()), )]); let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; @@ -7232,7 +7523,7 @@ pub(crate) mod tests { Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges([8..15]); + selections.select_ranges([MultiBufferOffset(8)..MultiBufferOffset(15)]); }); editor diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 8ace684234e90c5203528cae360a28b30798bea3..24f019c605d1b167e62a6e68dfc1f3ed07c73f1c 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -1,5 +1,5 @@ mod add_llm_provider_modal; -mod configure_context_server_modal; +pub mod configure_context_server_modal; mod configure_context_server_tools_modal; mod manage_profiles_modal; mod tool_picker; @@ -8,10 +8,11 @@ use std::{ops::Range, sync::Arc}; use agent::ContextServerRegistry; use anyhow::Result; +use client::zed_urls; use cloud_llm_client::{Plan, PlanV1, PlanV2}; use collections::HashMap; use context_server::ContextServerId; -use editor::{Editor, SelectionEffects, scroll::Autoscroll}; +use editor::{Editor, MultiBufferOffset, SelectionEffects, scroll::Autoscroll}; use extension::ExtensionManifest; use extension_host::ExtensionStore; use 
fs::Fs; @@ -26,26 +27,27 @@ use language_model::{ use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ - agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, + agent_server_store::{ + AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME, + }, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ - Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, - ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch, - SwitchColor, Tooltip, WithScrollbar, prelude::*, + ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure, Divider, + DividerColor, ElevationIndex, Indicator, LabelSize, PopoverMenu, Switch, Tooltip, + WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{Workspace, create_and_open_local_file}; -use zed_actions::ExtensionCategoryFilter; +use zed_actions::{ExtensionCategoryFilter, OpenBrowser}; pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal; pub(crate) use manage_profiles_modal::ManageProfilesModal; -use crate::{ - AddContextServer, - agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider}, +use crate::agent_configuration::add_llm_provider_modal::{ + AddLlmProviderModal, LlmCompatibleProvider, }; pub struct AgentConfiguration { @@ -415,6 +417,7 @@ impl AgentConfiguration { cx: &mut Context, ) -> impl IntoElement { let providers = LanguageModelRegistry::read_global(cx).providers(); + let popover_menu = PopoverMenu::new("add-provider-popover") .trigger( Button::new("add-provider", "Add Provider") @@ -425,7 +428,6 @@ impl AgentConfiguration { .icon_color(Color::Muted) .label_size(LabelSize::Small), ) - .anchor(gpui::Corner::TopRight) .menu({ let workspace = self.workspace.clone(); move |window, cx| { @@ -447,6 +449,11 @@ impl AgentConfiguration { }) })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -541,12 +548,13 @@ impl AgentConfiguration { .icon_color(Color::Muted) .label_size(LabelSize::Small), ) - .anchor(gpui::Corner::TopRight) .menu({ move |window, cx| { Some(ContextMenu::build(window, cx, |menu, _window, _cx| { menu.entry("Add Custom Server", None, { - |window, cx| window.dispatch_action(AddContextServer.boxed_clone(), cx) + |window, cx| { + window.dispatch_action(crate::AddContextServer.boxed_clone(), cx) + } }) .entry("Install from Extensions", None, { |window, cx| { @@ -564,6 +572,11 @@ impl AgentConfiguration { }) })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -639,7 +652,7 @@ impl AgentConfiguration { let is_running = matches!(server_status, ContextServerStatus::Running); let item_id = SharedString::from(context_server_id.0.clone()); // Servers without a configuration can only be provided by extensions. - let provided_by_extension = server_configuration.is_none_or(|config| { + let provided_by_extension = server_configuration.as_ref().is_none_or(|config| { matches!( config.as_ref(), ContextServerConfiguration::Extension { .. 
} @@ -695,7 +708,10 @@ impl AgentConfiguration { "Server is stopped.", ), }; - + let is_remote = server_configuration + .as_ref() + .map(|config| matches!(config.as_ref(), ContextServerConfiguration::Http { .. })) + .unwrap_or(false); let context_server_configuration_menu = PopoverMenu::new("context-server-config-menu") .trigger_with_tooltip( IconButton::new("context-server-config-menu", IconName::Settings) @@ -718,14 +734,25 @@ impl AgentConfiguration { let language_registry = language_registry.clone(); let workspace = workspace.clone(); move |window, cx| { - ConfigureContextServerModal::show_modal_for_existing_server( - context_server_id.clone(), - language_registry.clone(), - workspace.clone(), - window, - cx, - ) - .detach_and_log_err(cx); + if is_remote { + crate::agent_configuration::configure_context_server_modal::ConfigureContextServerModal::show_modal_for_existing_server( + context_server_id.clone(), + language_registry.clone(), + workspace.clone(), + window, + cx, + ) + .detach(); + } else { + ConfigureContextServerModal::show_modal_for_existing_server( + context_server_id.clone(), + language_registry.clone(), + workspace.clone(), + window, + cx, + ) + .detach(); + } } }).when(tool_count > 0, |this| this.entry("View Tools", None, { let context_server_id = context_server_id.clone(); @@ -811,7 +838,7 @@ impl AgentConfiguration { .min_w_0() .child( h_flex() - .id(SharedString::from(format!("tooltip-{}", item_id))) + .id(format!("tooltip-{}", item_id)) .h_full() .w_3() .mr_2() @@ -852,7 +879,6 @@ impl AgentConfiguration { .child(context_server_configuration_menu) .child( Switch::new("context-server-switch", is_running.into()) - .color(SwitchColor::Accent) .on_click({ let context_server_manager = self.context_server_store.clone(); let fs = self.fs.clone(); @@ -943,35 +969,104 @@ impl AgentConfiguration { .cloned() .collect::>(); - let user_defined_agents = user_defined_agents + let user_defined_agents: Vec<_> = user_defined_agents .into_iter() .map(|name| { let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) { AgentIcon::Path(icon_path) } else { - AgentIcon::Name(IconName::Ai) + AgentIcon::Name(IconName::Sparkle) }; - self.render_agent_server(icon, name, true) - .into_any_element() + let display_name = agent_server_store + .agent_display_name(&name) + .unwrap_or_else(|| name.0.clone()); + (name, icon, display_name) }) - .collect::>(); + .collect(); - let add_agens_button = Button::new("add-agent", "Add Agent") - .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .label_size(LabelSize::Small) - .on_click(move |_, window, cx| { - if let Some(workspace) = window.root().flatten() { - let workspace = workspace.downgrade(); - window - .spawn(cx, async |cx| { - open_new_agent_servers_entry_in_settings_editor(workspace, cx).await + let add_agent_popover = PopoverMenu::new("add-agent-server-popover") + .trigger( + Button::new("add-agent", "Add Agent") + .style(ButtonStyle::Outlined) + .icon_position(IconPosition::Start) + .icon(IconName::Plus) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .label_size(LabelSize::Small), + ) + .menu({ + move |window, cx| { + Some(ContextMenu::build(window, cx, |menu, _window, _cx| { + menu.entry("Install from Extensions", None, { + |window, cx| { + window.dispatch_action( + zed_actions::Extensions { + category_filter: Some( + ExtensionCategoryFilter::AgentServers, + ), + id: None, + } + .boxed_clone(), + cx, + ) + } }) - 
.detach_and_log_err(cx); + .entry("Add Custom Agent", None, { + move |window, cx| { + if let Some(workspace) = window.root().flatten() { + let workspace = workspace.downgrade(); + window + .spawn(cx, async |cx| { + open_new_agent_servers_entry_in_settings_editor( + workspace, cx, + ) + .await + }) + .detach_and_log_err(cx); + } + } + }) + .separator() + .header("Learn More") + .item( + ContextMenuEntry::new("Agent Servers Docs") + .icon(IconName::ArrowUpRight) + .icon_color(Color::Muted) + .icon_position(IconPosition::End) + .handler({ + move |window, cx| { + window.dispatch_action( + Box::new(OpenBrowser { + url: zed_urls::agent_server_docs(cx), + }), + cx, + ); + } + }), + ) + .item( + ContextMenuEntry::new("ACP Docs") + .icon(IconName::ArrowUpRight) + .icon_color(Color::Muted) + .icon_position(IconPosition::End) + .handler({ + move |window, cx| { + window.dispatch_action( + Box::new(OpenBrowser { + url: "https://agentclientprotocol.com/".into(), + }), + cx, + ); + } + }), + ) + })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -982,7 +1077,7 @@ impl AgentConfiguration { .child(self.render_section_title( "External Agents", "All agents connected through the Agent Client Protocol.", - add_agens_button.into_any_element(), + add_agent_popover.into_any_element(), )) .child( v_flex() @@ -992,27 +1087,39 @@ impl AgentConfiguration { .child(self.render_agent_server( AgentIcon::Name(IconName::AiClaude), "Claude Code", + "Claude Code", false, + cx, )) .child(Divider::horizontal().color(DividerColor::BorderFaded)) .child(self.render_agent_server( AgentIcon::Name(IconName::AiOpenAi), "Codex CLI", + "Codex CLI", false, + cx, )) .child(Divider::horizontal().color(DividerColor::BorderFaded)) .child(self.render_agent_server( AgentIcon::Name(IconName::AiGemini), "Gemini CLI", + "Gemini CLI", false, + cx, )) .map(|mut parent| { - for agent in user_defined_agents { + for (name, icon, display_name) in user_defined_agents { parent = parent .child( Divider::horizontal().color(DividerColor::BorderFaded), ) - .child(agent); + .child(self.render_agent_server( + icon, + name, + display_name, + true, + cx, + )); } parent }), @@ -1023,10 +1130,14 @@ impl AgentConfiguration { fn render_agent_server( &self, icon: AgentIcon, - name: impl Into, + id: impl Into, + display_name: impl Into, external: bool, + cx: &mut Context, ) -> impl IntoElement { - let name = name.into(); + let id = id.into(); + let display_name = display_name.into(); + let icon = match icon { AgentIcon::Name(icon_name) => Icon::new(icon_name) .size(IconSize::Small) @@ -1036,31 +1147,59 @@ impl AgentConfiguration { .color(Color::Muted), }; - let tooltip_id = SharedString::new(format!("agent-source-{}", name)); - let tooltip_message = format!("The {} agent was installed from an extension.", name); + let tooltip_id = SharedString::new(format!("agent-source-{}", id)); + let tooltip_message = format!( + "The {} agent was installed from an extension.", + display_name + ); + + let agent_server_name = ExternalAgentServerName(id.clone()); + + let uninstall_btn_id = SharedString::from(format!("uninstall-{}", id)); + let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Uninstall Agent Extension")) + .on_click(cx.listener(move |this, _, _window, cx| { + let agent_name = agent_server_name.clone(); + + if let Some(ext_id) = this.agent_server_store.update(cx, |store, _cx| { + 
store.get_extension_id_for_agent(&agent_name) + }) { + ExtensionStore::global(cx) + .update(cx, |store, cx| store.uninstall_extension(ext_id, cx)) + .detach_and_log_err(cx); + } + })); h_flex() - .gap_1p5() - .child(icon) - .child(Label::new(name)) - .when(external, |this| { - this.child( - div() - .id(tooltip_id) - .flex_none() - .tooltip(Tooltip::text(tooltip_message)) - .child( - Icon::new(IconName::ZedSrcExtension) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - }) + .gap_1() + .justify_between() .child( - Icon::new(IconName::Check) - .color(Color::Success) - .size(IconSize::Small), + h_flex() + .gap_1p5() + .child(icon) + .child(Label::new(display_name)) + .when(external, |this| { + this.child( + div() + .id(tooltip_id) + .flex_none() + .tooltip(Tooltip::text(tooltip_message)) + .child( + Icon::new(IconName::ZedSrcExtension) + .size(IconSize::Small) + .color(Color::Muted), + ), + ) + }) + .child( + Icon::new(IconName::Check) + .color(Color::Success) + .size(IconSize::Small), + ), ) + .when(external, |this| this.child(uninstall_button)) } } @@ -1087,7 +1226,7 @@ impl Render for AgentConfiguration { .child(self.render_context_servers_section(window, cx)) .child(self.render_provider_configuration_section(cx)), ) - .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx), + .vertical_scrollbar_for(&self.scroll_handle, window, cx), ) } } @@ -1221,11 +1360,12 @@ async fn open_new_agent_servers_entry_in_settings_editor( .custom .insert( server_name, - settings::CustomAgentServerSettings { + settings::CustomAgentServerSettings::Custom { path: "path_to_executable".into(), args: vec![], env: Some(HashMap::default()), default_mode: None, + default_model: None, }, ); } @@ -1240,7 +1380,15 @@ async fn open_new_agent_servers_entry_in_settings_editor( .map(|(range, _)| range.clone()) .collect::>(); - item.edit(edits, cx); + item.edit( + edits.into_iter().map(|(range, s)| { + ( + MultiBufferOffset(range.start)..MultiBufferOffset(range.end), + s, + ) + }), + cx, + ); if let Some((unique_server_name, buffer)) = unique_server_name.zip(item.buffer().read(cx).as_singleton()) { @@ -1253,7 +1401,9 @@ async fn open_new_agent_servers_entry_in_settings_editor( window, cx, |selections| { - selections.select_ranges(vec![range]); + selections.select_ranges(vec![ + MultiBufferOffset(range.start)..MultiBufferOffset(range.end), + ]); }, ); } diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index f41b59132d8c0dfe5ed39fe84775e61b8131cc0a..e443df33b4ddcaeba32b9b2623c0fdca85fac51c 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -3,16 +3,42 @@ use std::sync::Arc; use anyhow::Result; use collections::HashSet; use fs::Fs; -use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task}; +use gpui::{ + DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, ScrollHandle, Task, +}; use language_model::LanguageModelRegistry; use language_models::provider::open_ai_compatible::{AvailableModel, ModelCapabilities}; use settings::{OpenAiCompatibleSettingsContent, update_settings_file}; use ui::{ - Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*, + Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, + WithScrollbar, prelude::*, }; use ui_input::InputField; use workspace::{ModalView, Workspace}; +fn 
single_line_input( + label: impl Into, + placeholder: impl Into, + text: Option<&str>, + tab_index: isize, + window: &mut Window, + cx: &mut App, +) -> Entity { + cx.new(|cx| { + let input = InputField::new(window, cx, placeholder) + .label(label) + .tab_index(tab_index) + .tab_stop(true); + + if let Some(text) = text { + input + .editor() + .update(cx, |editor, cx| editor.set_text(text, window, cx)); + } + input + }) +} + #[derive(Clone, Copy)] pub enum LlmCompatibleProvider { OpenAi, @@ -41,12 +67,14 @@ struct AddLlmProviderInput { impl AddLlmProviderInput { fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self { - let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx); - let api_url = single_line_input("API URL", provider.api_url(), None, window, cx); + let provider_name = + single_line_input("Provider Name", provider.name(), None, 1, window, cx); + let api_url = single_line_input("API URL", provider.api_url(), None, 2, window, cx); let api_key = single_line_input( "API Key", "000000000000000000000000000000000000000000000000", None, + 3, window, cx, ); @@ -55,12 +83,13 @@ impl AddLlmProviderInput { provider_name, api_url, api_key, - models: vec![ModelInput::new(window, cx)], + models: vec![ModelInput::new(0, window, cx)], } } fn add_model(&mut self, window: &mut Window, cx: &mut App) { - self.models.push(ModelInput::new(window, cx)); + let model_index = self.models.len(); + self.models.push(ModelInput::new(model_index, window, cx)); } fn remove_model(&mut self, index: usize) { @@ -84,11 +113,14 @@ struct ModelInput { } impl ModelInput { - fn new(window: &mut Window, cx: &mut App) -> Self { + fn new(model_index: usize, window: &mut Window, cx: &mut App) -> Self { + let base_tab_index = (3 + (model_index * 4)) as isize; + let model_name = single_line_input( "Model Name", "e.g. 
gpt-4o, claude-opus-4, gemini-2.5-pro", None, + base_tab_index + 1, window, cx, ); @@ -96,6 +128,7 @@ impl ModelInput { "Max Completion Tokens", "200000", Some("200000"), + base_tab_index + 2, window, cx, ); @@ -103,16 +136,26 @@ impl ModelInput { "Max Output Tokens", "Max Output Tokens", Some("32000"), + base_tab_index + 3, window, cx, ); - let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx); + let max_tokens = single_line_input( + "Max Tokens", + "Max Tokens", + Some("200000"), + base_tab_index + 4, + window, + cx, + ); + let ModelCapabilities { tools, images, parallel_tool_calls, prompt_cache_key, } = ModelCapabilities::default(); + Self { name: model_name, max_completion_tokens, @@ -165,24 +208,6 @@ impl ModelInput { } } -fn single_line_input( - label: impl Into, - placeholder: impl Into, - text: Option<&str>, - window: &mut Window, - cx: &mut App, -) -> Entity { - cx.new(|cx| { - let input = InputField::new(window, cx, placeholder).label(label); - if let Some(text) = text { - input - .editor() - .update(cx, |editor, cx| editor.set_text(text, window, cx)); - } - input - }) -} - fn save_provider_to_settings( input: &AddLlmProviderInput, cx: &mut App, @@ -258,6 +283,7 @@ fn save_provider_to_settings( pub struct AddLlmProviderModal { provider: LlmCompatibleProvider, input: AddLlmProviderInput, + scroll_handle: ScrollHandle, focus_handle: FocusHandle, last_error: Option, } @@ -278,6 +304,7 @@ impl AddLlmProviderModal { provider, last_error: None, focus_handle: cx.focus_handle(), + scroll_handle: ScrollHandle::new(), } } @@ -418,6 +445,19 @@ impl AddLlmProviderModal { ) }) } + + fn on_tab(&mut self, _: &menu::SelectNext, window: &mut Window, cx: &mut Context) { + window.focus_next(cx); + } + + fn on_tab_prev( + &mut self, + _: &menu::SelectPrevious, + window: &mut Window, + cx: &mut Context, + ) { + window.focus_prev(cx); + } } impl EventEmitter for AddLlmProviderModal {} @@ -431,17 +471,29 @@ impl Focusable for AddLlmProviderModal { impl ModalView for AddLlmProviderModal {} impl Render for AddLlmProviderModal { - fn render(&mut self, _window: &mut ui::Window, cx: &mut ui::Context) -> impl IntoElement { + fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context) -> impl IntoElement { let focus_handle = self.focus_handle(cx); - div() + let window_size = window.viewport_size(); + let rem_size = window.rem_size(); + let is_large_window = window_size.height / rem_size > rems_from_px(600.).0; + + let modal_max_height = if is_large_window { + rems_from_px(450.) + } else { + rems_from_px(200.) 
+ }; + + v_flex() .id("add-llm-provider-modal") .key_context("AddLlmProviderModal") .w(rems(34.)) .elevation_3(cx) .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(Self::on_tab)) + .on_action(cx.listener(Self::on_tab_prev)) .capture_any_mouse_down(cx.listener(|this, _, window, cx| { - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); })) .child( Modal::new("configure-context-server", None) @@ -462,17 +514,25 @@ impl Render for AddLlmProviderModal { ) }) .child( - v_flex() - .id("modal_content") + div() .size_full() - .max_h_128() - .overflow_y_scroll() - .px(DynamicSpacing::Base12.rems(cx)) - .gap(DynamicSpacing::Base04.rems(cx)) - .child(self.input.provider_name.clone()) - .child(self.input.api_url.clone()) - .child(self.input.api_key.clone()) - .child(self.render_model_section(cx)), + .vertical_scrollbar_for(&self.scroll_handle, window, cx) + .child( + v_flex() + .id("modal_content") + .size_full() + .tab_group() + .max_h(modal_max_height) + .pl_3() + .pr_4() + .gap_2() + .overflow_y_scroll() + .track_scroll(&self.scroll_handle) + .child(self.input.provider_name.clone()) + .child(self.input.api_url.clone()) + .child(self.input.api_key.clone()) + .child(self.render_model_section(cx)), + ), ) .footer( ModalFooter::new().end_slot( @@ -642,7 +702,7 @@ mod tests { let cx = setup_test(cx).await; cx.update(|window, cx| { - let model_input = ModelInput::new(window, cx); + let model_input = ModelInput::new(0, window, cx); model_input.name.update(cx, |input, cx| { input.editor().update(cx, |editor, cx| { editor.set_text("somemodel", window, cx); @@ -678,7 +738,7 @@ mod tests { let cx = setup_test(cx).await; cx.update(|window, cx| { - let mut model_input = ModelInput::new(window, cx); + let mut model_input = ModelInput::new(0, window, cx); model_input.name.update(cx, |input, cx| { input.editor().update(cx, |editor, cx| { editor.set_text("somemodel", window, cx); @@ -703,7 +763,7 @@ mod tests { let cx = setup_test(cx).await; cx.update(|window, cx| { - let mut model_input = ModelInput::new(window, cx); + let mut model_input = ModelInput::new(0, window, cx); model_input.name.update(cx, |input, cx| { input.editor().update(cx, |editor, cx| { editor.set_text("somemodel", window, cx); @@ -767,7 +827,7 @@ mod tests { models.iter().enumerate() { if i >= input.models.len() { - input.models.push(ModelInput::new(window, cx)); + input.models.push(ModelInput::new(i, window, cx)); } let model = &mut input.models[i]; set_text(&model.name, name, window, cx); diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 88896f51086dc5f7d3eddb2fffef2fa3a7039c79..b30f1494f0d4dcbf3ef63cc7f549d16374f4899b 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -1,14 +1,12 @@ -use std::{ - path::PathBuf, - sync::{Arc, Mutex}, -}; +use std::sync::{Arc, Mutex}; use anyhow::{Context as _, Result}; +use collections::HashMap; use context_server::{ContextServerCommand, ContextServerId}; use editor::{Editor, EditorElement, EditorStyle}; use gpui::{ - AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, - TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, + AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, + Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, 
prelude::*, }; use language::{Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; @@ -20,10 +18,12 @@ use project::{ project_settings::{ContextServerSettings, ProjectSettings}, worktree_store::WorktreeStore, }; +use serde::Deserialize; use settings::{Settings as _, update_settings_file}; use theme::ThemeSettings; use ui::{ - CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*, + CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, + WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{ModalView, Workspace}; @@ -36,6 +36,11 @@ enum ConfigurationTarget { id: ContextServerId, command: ContextServerCommand, }, + ExistingHttp { + id: ContextServerId, + url: String, + headers: HashMap, + }, Extension { id: ContextServerId, repository_url: Option, @@ -46,9 +51,11 @@ enum ConfigurationTarget { enum ConfigurationSource { New { editor: Entity, + is_http: bool, }, Existing { editor: Entity, + is_http: bool, }, Extension { id: ContextServerId, @@ -96,6 +103,7 @@ impl ConfigurationSource { match target { ConfigurationTarget::New => ConfigurationSource::New { editor: create_editor(context_server_input(None), jsonc_language, window, cx), + is_http: false, }, ConfigurationTarget::Existing { id, command } => ConfigurationSource::Existing { editor: create_editor( @@ -104,6 +112,20 @@ impl ConfigurationSource { window, cx, ), + is_http: false, + }, + ConfigurationTarget::ExistingHttp { + id, + url, + headers: auth, + } => ConfigurationSource::Existing { + editor: create_editor( + context_server_http_input(Some((id, url, auth))), + jsonc_language, + window, + cx, + ), + is_http: true, }, ConfigurationTarget::Extension { id, @@ -140,16 +162,30 @@ impl ConfigurationSource { fn output(&self, cx: &mut App) -> Result<(ContextServerId, ContextServerSettings)> { match self { - ConfigurationSource::New { editor } | ConfigurationSource::Existing { editor } => { - parse_input(&editor.read(cx).text(cx)).map(|(id, command)| { - ( - id, - ContextServerSettings::Custom { - enabled: true, - command, - }, - ) - }) + ConfigurationSource::New { editor, is_http } + | ConfigurationSource::Existing { editor, is_http } => { + if *is_http { + parse_http_input(&editor.read(cx).text(cx)).map(|(id, url, auth)| { + ( + id, + ContextServerSettings::Http { + enabled: true, + url, + headers: auth, + }, + ) + }) + } else { + parse_input(&editor.read(cx).text(cx)).map(|(id, command)| { + ( + id, + ContextServerSettings::Stdio { + enabled: true, + command, + }, + ) + }) + } } ConfigurationSource::Extension { id, @@ -185,11 +221,12 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand) Some((id, cmd)) => { let args = serde_json::to_string(&cmd.args).unwrap(); let env = serde_json::to_string(&cmd.env.unwrap_or_default()).unwrap(); - (id.0.to_string(), cmd.path, args, env) + let cmd_path = serde_json::to_string(&cmd.path).unwrap(); + (id.0.to_string(), cmd_path, args, env) } None => ( "some-mcp-server".to_string(), - PathBuf::new(), + "".to_string(), "[]".to_string(), "{}".to_string(), ), @@ -200,17 +237,76 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand) /// The name of your MCP server "{name}": {{ /// The command which runs the MCP server - "command": "{}", + "command": {command}, /// The arguments to pass to the MCP server "args": {args}, /// The environment variables to set "env": {env} }} -}}"#, - command.display() +}}"# + ) +} + +fn 
context_server_http_input( + existing: Option<(ContextServerId, String, HashMap)>, +) -> String { + let (name, url, headers) = match existing { + Some((id, url, headers)) => { + let header = if headers.is_empty() { + r#"// "Authorization": "Bearer "#.to_string() + } else { + let json = serde_json::to_string_pretty(&headers).unwrap(); + let mut lines = json.split("\n").collect::>(); + if lines.len() > 1 { + lines.remove(0); + lines.pop(); + } + lines + .into_iter() + .map(|line| format!(" {}", line)) + .collect::() + }; + (id.0.to_string(), url, header) + } + None => ( + "some-remote-server".to_string(), + "https://example.com/mcp".to_string(), + r#"// "Authorization": "Bearer "#.to_string(), + ), + }; + + format!( + r#"{{ + /// The name of your remote MCP server + "{name}": {{ + /// The URL of the remote MCP server + "url": "{url}", + "headers": {{ + /// Any headers to send along + {headers} + }} + }} +}}"# ) } +fn parse_http_input(text: &str) -> Result<(ContextServerId, String, HashMap)> { + #[derive(Deserialize)] + struct Temp { + url: String, + #[serde(default)] + headers: HashMap, + } + let value: HashMap = serde_json_lenient::from_str(text)?; + if value.len() != 1 { + anyhow::bail!("Expected exactly one context server configuration"); + } + + let (key, value) = value.into_iter().next().unwrap(); + + Ok((ContextServerId(key.into()), value.url, value.headers)) +} + fn resolve_context_server_extension( id: ContextServerId, worktree_store: Entity, @@ -252,6 +348,7 @@ pub struct ConfigureContextServerModal { source: ConfigurationSource, state: State, original_server_id: Option, + scroll_handle: ScrollHandle, } impl ConfigureContextServerModal { @@ -303,13 +400,22 @@ impl ConfigureContextServerModal { window.spawn(cx, async move |cx| { let target = match settings { - ContextServerSettings::Custom { + ContextServerSettings::Stdio { enabled: _, command, } => Some(ConfigurationTarget::Existing { id: server_id, command, }), + ContextServerSettings::Http { + enabled: _, + url, + headers, + } => Some(ConfigurationTarget::ExistingHttp { + id: server_id, + url, + headers, + }), ContextServerSettings::Extension { .. } => { match workspace .update(cx, |workspace, cx| { @@ -351,6 +457,7 @@ impl ConfigureContextServerModal { state: State::Idle, original_server_id: match &target { ConfigurationTarget::Existing { id, .. } => Some(id.clone()), + ConfigurationTarget::ExistingHttp { id, .. } => Some(id.clone()), ConfigurationTarget::Extension { id, .. } => Some(id.clone()), ConfigurationTarget::New => None, }, @@ -361,6 +468,7 @@ impl ConfigureContextServerModal { window, cx, ), + scroll_handle: ScrollHandle::new(), }) }) }) @@ -478,7 +586,7 @@ impl ModalView for ConfigureContextServerModal {} impl Focusable for ConfigureContextServerModal { fn focus_handle(&self, cx: &App) -> FocusHandle { match &self.source { - ConfigurationSource::New { editor } => editor.focus_handle(cx), + ConfigurationSource::New { editor, .. } => editor.focus_handle(cx), ConfigurationSource::Existing { editor, .. } => editor.focus_handle(cx), ConfigurationSource::Extension { editor, .. } => editor .as_ref() @@ -525,8 +633,8 @@ impl ConfigureContextServerModal { fn render_modal_content(&self, cx: &App) -> AnyElement { let editor = match &self.source { - ConfigurationSource::New { editor } => editor, - ConfigurationSource::Existing { editor } => editor, + ConfigurationSource::New { editor, .. } => editor, + ConfigurationSource::Existing { editor, .. } => editor, ConfigurationSource::Extension { editor, .. 
} => { let Some(editor) = editor else { return div().into_any_element(); @@ -598,6 +706,36 @@ impl ConfigureContextServerModal { move |_, _, cx| cx.open_url(&repository_url) }), ) + } else if let ConfigurationSource::New { is_http, .. } = &self.source { + let label = if *is_http { + "Configure Local" + } else { + "Configure Remote" + }; + let tooltip = if *is_http { + "Configure an MCP server that runs on stdin/stdout." + } else { + "Configure an MCP server that you connect to over HTTP" + }; + + Some( + Button::new("toggle-kind", label) + .tooltip(Tooltip::text(tooltip)) + .on_click(cx.listener(|this, _, window, cx| match &mut this.source { + ConfigurationSource::New { editor, is_http } => { + *is_http = !*is_http; + let new_text = if *is_http { + context_server_http_input(None) + } else { + context_server_input(None) + }; + editor.update(cx, |editor, cx| { + editor.set_text(new_text, window, cx); + }) + } + _ => {} + })), + ) } else { None }, @@ -693,20 +831,35 @@ impl Render for ConfigureContextServerModal { }), ) .capture_any_mouse_down(cx.listener(|this, _, window, cx| { - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); })) .child( Modal::new("configure-context-server", None) .header(self.render_modal_header()) .section( - Section::new() - .child(self.render_modal_description(window, cx)) - .child(self.render_modal_content(cx)) - .child(match &self.state { - State::Idle => div(), - State::Waiting => Self::render_waiting_for_context_server(), - State::Error(error) => Self::render_modal_error(error.clone()), - }), + Section::new().child( + div() + .size_full() + .child( + div() + .id("modal-content") + .max_h(vh(0.7, window)) + .overflow_y_scroll() + .track_scroll(&self.scroll_handle) + .child(self.render_modal_description(window, cx)) + .child(self.render_modal_content(cx)) + .child(match &self.state { + State::Idle => div(), + State::Waiting => { + Self::render_waiting_for_context_server() + } + State::Error(error) => { + Self::render_modal_error(error.clone()) + } + }), + ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx), + ), ) .footer(self.render_modal_footer(cx)), ) diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs index 3fe0b8d1b1400b4362192261995ed5b6bd1cb662..5115e2f70c0ae87cdd3ca3901a64aed09de68b0f 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs @@ -87,7 +87,7 @@ impl ConfigureContextServerToolsModal { v_flex() .child( h_flex() - .id(SharedString::from(format!("tool-header-{}", index))) + .id(format!("tool-header-{}", index)) .py_1() .pl_1() .pr_2() @@ -138,7 +138,7 @@ impl ConfigureContextServerToolsModal { items })), ) - .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) .into_any_element() } } diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index 210cf5f5dd6612855b32e358a2d3ec38e8259373..c7f395ebbd813cfd7c28f33a7e69ec32f6d90fca 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -8,6 +8,7 @@ use editor::Editor; use fs::Fs; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, 
prelude::*}; use language_model::{LanguageModel, LanguageModelRegistry}; +use settings::SettingsStore; use settings::{ LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file, }; @@ -94,6 +95,7 @@ pub struct ViewProfileMode { configure_default_model: NavigableEntry, configure_tools: NavigableEntry, configure_mcps: NavigableEntry, + delete_profile: NavigableEntry, cancel_item: NavigableEntry, } @@ -109,6 +111,7 @@ pub struct ManageProfilesModal { active_model: Option>, focus_handle: FocusHandle, mode: Mode, + _settings_subscription: Subscription, } impl ManageProfilesModal { @@ -148,18 +151,29 @@ impl ManageProfilesModal { ) -> Self { let focus_handle = cx.focus_handle(); + // Keep this modal in sync with settings changes (including profile deletion). + let settings_subscription = + cx.observe_global_in::(window, |this, window, cx| { + if matches!(this.mode, Mode::ChooseProfile(_)) { + this.mode = Mode::choose_profile(window, cx); + this.focus_handle(cx).focus(window, cx); + cx.notify(); + } + }); + Self { fs, active_model, context_server_registry, focus_handle, mode: Mode::choose_profile(window, cx), + _settings_subscription: settings_subscription, } } fn choose_profile(&mut self, window: &mut Window, cx: &mut Context) { self.mode = Mode::choose_profile(window, cx); - self.focus_handle(cx).focus(window); + self.focus_handle(cx).focus(window, cx); } fn new_profile( @@ -177,7 +191,7 @@ impl ManageProfilesModal { name_editor, base_profile_id, }); - self.focus_handle(cx).focus(window); + self.focus_handle(cx).focus(window, cx); } pub fn view_profile( @@ -192,9 +206,10 @@ impl ManageProfilesModal { configure_default_model: NavigableEntry::focusable(cx), configure_tools: NavigableEntry::focusable(cx), configure_mcps: NavigableEntry::focusable(cx), + delete_profile: NavigableEntry::focusable(cx), cancel_item: NavigableEntry::focusable(cx), }); - self.focus_handle(cx).focus(window); + self.focus_handle(cx).focus(window, cx); } fn configure_default_model( @@ -207,7 +222,6 @@ impl ManageProfilesModal { let profile_id_for_closure = profile_id.clone(); let model_picker = cx.new(|cx| { - let fs = fs.clone(); let profile_id = profile_id_for_closure.clone(); language_model_selector( @@ -235,24 +249,39 @@ impl ManageProfilesModal { }) } }, - move |model, cx| { - let provider = model.provider_id().0.to_string(); - let model_id = model.id().0.to_string(); - let profile_id = profile_id.clone(); + { + let fs = fs.clone(); + move |model, cx| { + let provider = model.provider_id().0.to_string(); + let model_id = model.id().0.to_string(); + let profile_id = profile_id.clone(); - update_settings_file(fs.clone(), cx, move |settings, _cx| { - let agent_settings = settings.agent.get_or_insert_default(); - if let Some(profiles) = agent_settings.profiles.as_mut() { - if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) { - profile.default_model = Some(LanguageModelSelection { - provider: LanguageModelProviderSetting(provider.clone()), - model: model_id.clone(), - }); + update_settings_file(fs.clone(), cx, move |settings, _cx| { + let agent_settings = settings.agent.get_or_insert_default(); + if let Some(profiles) = agent_settings.profiles.as_mut() { + if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) { + profile.default_model = Some(LanguageModelSelection { + provider: LanguageModelProviderSetting(provider.clone()), + model: model_id.clone(), + }); + } } - } - }); + }); + } + }, + { + let fs = fs.clone(); + move |model, should_be_favorite, cx| { + 
                        crate::favorite_models::toggle_in_settings(
+                            model,
+                            should_be_favorite,
+                            fs.clone(),
+                            cx,
+                        );
+                    }
                 },
                 false, // Do not use popover styles for the model picker
+                self.focus_handle.clone(),
                 window,
                 cx,
             )
@@ -271,7 +300,7 @@ impl ManageProfilesModal {
             model_picker,
             _subscription: dismiss_subscription,
         };
-        self.focus_handle(cx).focus(window);
+        self.focus_handle(cx).focus(window, cx);
     }

     fn configure_mcp_tools(
@@ -307,7 +336,7 @@ impl ManageProfilesModal {
             tool_picker,
             _subscription: dismiss_subscription,
         };
-        self.focus_handle(cx).focus(window);
+        self.focus_handle(cx).focus(window, cx);
     }

     fn configure_builtin_tools(
@@ -348,7 +377,7 @@ impl ManageProfilesModal {
             tool_picker,
             _subscription: dismiss_subscription,
         };
-        self.focus_handle(cx).focus(window);
+        self.focus_handle(cx).focus(window, cx);
     }

     fn confirm(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -368,6 +397,42 @@ impl ManageProfilesModal {
         }
     }

+    fn delete_profile(
+        &mut self,
+        profile_id: AgentProfileId,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        if builtin_profiles::is_builtin(&profile_id) {
+            self.view_profile(profile_id, window, cx);
+            return;
+        }
+
+        let fs = self.fs.clone();
+
+        update_settings_file(fs, cx, move |settings, _cx| {
+            let Some(agent_settings) = settings.agent.as_mut() else {
+                return;
+            };
+
+            let Some(profiles) = agent_settings.profiles.as_mut() else {
+                return;
+            };
+
+            profiles.shift_remove(profile_id.0.as_ref());
+
+            if agent_settings
+                .default_profile
+                .as_deref()
+                .is_some_and(|default_profile| default_profile == profile_id.0.as_ref())
+            {
+                agent_settings.default_profile = Some(AgentProfileId::default().0);
+            }
+        });
+
+        self.choose_profile(window, cx);
+    }
+
     fn cancel(&mut self, window: &mut Window, cx: &mut Context<Self>) {
         match &self.mode {
             Mode::ChooseProfile { ..
} => { @@ -421,7 +486,7 @@ impl ManageProfilesModal { let is_focused = profile.navigation.focus_handle.contains_focused(window, cx); div() - .id(SharedString::from(format!("profile-{}", profile.id))) + .id(format!("profile-{}", profile.id)) .track_focus(&profile.navigation.focus_handle) .on_action({ let profile_id = profile.id.clone(); @@ -430,7 +495,7 @@ impl ManageProfilesModal { }) }) .child( - ListItem::new(SharedString::from(format!("profile-{}", profile.id))) + ListItem::new(format!("profile-{}", profile.id)) .toggle_state(is_focused) .inset(true) .spacing(ListItemSpacing::Sparse) @@ -755,6 +820,40 @@ impl ManageProfilesModal { }), ), ) + .child( + div() + .id("delete-profile") + .track_focus(&mode.delete_profile.focus_handle) + .on_action({ + let profile_id = mode.profile_id.clone(); + cx.listener(move |this, _: &menu::Confirm, window, cx| { + this.delete_profile(profile_id.clone(), window, cx); + }) + }) + .child( + ListItem::new("delete-profile") + .toggle_state( + mode.delete_profile + .focus_handle + .contains_focused(window, cx), + ) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .start_slot( + Icon::new(IconName::Trash) + .size(IconSize::Small) + .color(Color::Error), + ) + .child(Label::new("Delete Profile").color(Color::Error)) + .disabled(builtin_profiles::is_builtin(&mode.profile_id)) + .on_click({ + let profile_id = mode.profile_id.clone(); + cx.listener(move |this, _, window, cx| { + this.delete_profile(profile_id.clone(), window, cx); + }) + }), + ), + ) .child(ListSeparator) .child( div() @@ -804,6 +903,7 @@ impl ManageProfilesModal { .entry(mode.configure_default_model) .entry(mode.configure_tools) .entry(mode.configure_mcps) + .entry(mode.delete_profile) .entry(mode.cancel_item) } } @@ -851,7 +951,7 @@ impl Render for ManageProfilesModal { .on_action(cx.listener(|this, _: &menu::Cancel, window, cx| this.cancel(window, cx))) .on_action(cx.listener(|this, _: &menu::Confirm, window, cx| this.confirm(window, cx))) .capture_any_mouse_down(cx.listener(|this, _, window, cx| { - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); })) .on_mouse_down_out(cx.listener(|_this, _, _, cx| cx.emit(DismissEvent))) .child(match &self.mode { diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 85e3264458364f10d1c90d7c18c3609c9c7a7fd4..91d345b7ebb9dae5225626d7a054d0de1882dfe0 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -13,8 +13,8 @@ use editor::{ scroll::Autoscroll, }; use gpui::{ - Action, AnyElement, AnyView, App, AppContext, Empty, Entity, EventEmitter, FocusHandle, - Focusable, Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*, + Action, AnyElement, App, AppContext, Empty, Entity, EventEmitter, FocusHandle, Focusable, + Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*, }; use language::{Buffer, Capability, DiskState, OffsetRangeExt, Point}; @@ -130,7 +130,12 @@ impl AgentDiffPane { .action_log() .read(cx) .changed_buffers(cx); - let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::>(); + let mut paths_to_delete = self + .multibuffer + .read(cx) + .paths() + .cloned() + .collect::>(); for (buffer, diff_handle) in changed_buffers { if buffer.read(cx).file().is_none() { @@ -145,7 +150,7 @@ impl AgentDiffPane { let diff_hunk_ranges = diff .hunks_intersecting_range( - language::Anchor::MIN..language::Anchor::MAX, + language::Anchor::min_max_range_for_buffer(snapshot.remote_id()), &snapshot, cx, ) @@ -207,10 
+212,10 @@ impl AgentDiffPane { .focus_handle(cx) .contains_focused(window, cx) { - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } else if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() { self.editor.update(cx, |editor, cx| { - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); }); } } @@ -493,7 +498,7 @@ impl Item for AgentDiffPane { Some("Assistant Diff Opened") } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } @@ -580,11 +585,11 @@ impl Item for AgentDiffPane { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } @@ -869,12 +874,12 @@ impl AgentDiffToolbar { match active_item { AgentDiffToolbarItem::Pane(agent_diff) => { if let Some(agent_diff) = agent_diff.upgrade() { - agent_diff.focus_handle(cx).focus(window); + agent_diff.focus_handle(cx).focus(window, cx); } } AgentDiffToolbarItem::Editor { editor, .. } => { if let Some(editor) = editor.upgrade() { - editor.read(cx).focus_handle(cx).focus(window); + editor.read(cx).focus_handle(cx).focus(window, cx); } } } diff --git a/crates/agent_ui/src/agent_model_selector.rs b/crates/agent_ui/src/agent_model_selector.rs index 900ca0b683670a30b3353655d17c2ef79cd5523b..ac57ed575d9d1b6de2c53d3e0e4a91b4bd16ab1a 100644 --- a/crates/agent_ui/src/agent_model_selector.rs +++ b/crates/agent_ui/src/agent_model_selector.rs @@ -25,29 +25,45 @@ impl AgentModelSelector { window: &mut Window, cx: &mut Context, ) -> Self { + let focus_handle_clone = focus_handle.clone(); + Self { selector: cx.new(move |cx| { - let fs = fs.clone(); language_model_selector( { let model_context = model_usage_context.clone(); move |cx| model_context.configured_model(cx) }, - move |model, cx| { - let provider = model.provider_id().0.to_string(); - let model_id = model.id().0.to_string(); - match &model_usage_context { - ModelUsageContext::InlineAssistant => { - update_settings_file(fs.clone(), cx, move |settings, _cx| { - settings - .agent - .get_or_insert_default() - .set_inline_assistant_model(provider.clone(), model_id); - }); + { + let fs = fs.clone(); + move |model, cx| { + let provider = model.provider_id().0.to_string(); + let model_id = model.id().0.to_string(); + match &model_usage_context { + ModelUsageContext::InlineAssistant => { + update_settings_file(fs.clone(), cx, move |settings, _cx| { + settings + .agent + .get_or_insert_default() + .set_inline_assistant_model(provider.clone(), model_id); + }); + } } } }, + { + let fs = fs.clone(); + move |model, should_be_favorite, cx| { + crate::favorite_models::toggle_in_settings( + model, + should_be_favorite, + fs.clone(), + cx, + ); + } + }, true, // Use popover styles for picker + focus_handle_clone, window, cx, ) @@ -60,6 +76,10 @@ impl AgentModelSelector { pub fn toggle(&self, window: &mut Window, cx: &mut Context) { self.menu_handle.toggle(window, cx); } + + pub fn active_model(&self, cx: &App) -> Option { + self.selector.read(cx).delegate.active_model(cx) + } } impl Render for AgentModelSelector { @@ -95,7 +115,7 @@ impl Render for AgentModelSelector { .child( Icon::new(IconName::ChevronDown) .color(color) - .size(IconSize::XSmall), + .size(IconSize::Small), ), move |_window, cx| { 
Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 58839d5d8df2a6e2e149800ecf47b30c3383bc0b..294cd8b4888950f6ea92d6bea1eba78c3d6d6de2 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,16 +1,12 @@ -use std::ops::Range; -use std::path::Path; -use std::rc::Rc; -use std::sync::Arc; +use std::{ops::Range, path::Path, rc::Rc, sync::Arc, time::Duration}; use acp_thread::AcpThread; use agent::{ContextServerRegistry, DbThreadMetadata, HistoryEntry, HistoryStore}; +use agent_servers::AgentServer; use db::kvp::{Dismissable, KEY_VALUE_STORE}; use project::{ ExternalAgentServerName, - agent_server_store::{ - AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME, - }, + agent_server_store::{CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, }; use serde::{Deserialize, Serialize}; use settings::{ @@ -19,12 +15,12 @@ use settings::{ use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent}; +use crate::ManageProfiles; use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; use crate::{ - AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant, - NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, - ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, ToggleNewThreadMenu, - ToggleOptionsMenu, + AddContextServer, AgentDiffPane, Follow, InlineAssistant, NewTextThread, NewThread, + OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, + ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, acp::AcpThreadView, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, slash_command::SlashCommandCompletionProvider, @@ -35,10 +31,7 @@ use crate::{ ExpandMessageEditor, acp::{AcpThreadHistory, ThreadHistoryEvent}, }; -use crate::{ - ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command, -}; -use crate::{ManageProfiles, context_store::ContextStore}; +use crate::{ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary}; use agent_settings::AgentSettings; use ai_onboarding::AgentPanelOnboarding; use anyhow::{Result, anyhow}; @@ -51,9 +44,9 @@ use extension::ExtensionEvents; use extension_host::ExtensionStore; use fs::Fs; use gpui::{ - Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter, - ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, Task, UpdateGlobal, - WeakEntity, prelude::*, + Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, + Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, + Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, }; use language::LanguageRegistry; use language_model::{ConfigurationError, LanguageModelRegistry}; @@ -61,12 +54,11 @@ use project::{Project, ProjectPath, Worktree}; use prompt_store::{PromptBuilder, PromptStore, UserPromptId}; use rules_library::{RulesLibrary, open_rules_library}; use search::{BufferSearchBar, buffer_search}; -use settings::{Settings, SettingsStore, update_settings_file}; +use settings::{Settings, update_settings_file}; use theme::ThemeSettings; -use ui::utils::WithRemSize; use ui::{ Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, - ProgressBar, Tab, Tooltip, prelude::*, + ProgressBar, 
Tab, Tooltip, prelude::*, utils::WithRemSize, }; use util::ResultExt as _; use workspace::{ @@ -248,7 +240,6 @@ pub enum AgentType { Codex, Custom { name: SharedString, - command: AgentServerCommand, }, } @@ -269,7 +260,7 @@ impl AgentType { Self::Gemini => Some(IconName::AiGemini), Self::ClaudeCode => Some(IconName::AiClaude), Self::Codex => Some(IconName::AiOpenAi), - Self::Custom { .. } => Some(IconName::Terminal), + Self::Custom { .. } => Some(IconName::Sparkle), } } } @@ -280,7 +271,7 @@ impl From for AgentType { ExternalAgent::Gemini => Self::Gemini, ExternalAgent::ClaudeCode => Self::ClaudeCode, ExternalAgent::Codex => Self::Codex, - ExternalAgent::Custom { name, command } => Self::Custom { name, command }, + ExternalAgent::Custom { name } => Self::Custom { name }, ExternalAgent::NativeAgent => Self::NativeAgent, } } @@ -297,7 +288,7 @@ impl ActiveView { } } - pub fn native_agent( + fn native_agent( fs: Arc, prompt_store: Option>, history_store: Entity, @@ -315,6 +306,7 @@ impl ActiveView { project, history_store, prompt_store, + false, window, cx, ) @@ -436,7 +428,6 @@ pub struct AgentPanel { text_thread_store: Entity, prompt_store: Option>, context_server_registry: Entity, - inline_assist_context_store: Entity, configuration: Option>, configuration_subscription: Option, active_view: ActiveView, @@ -452,6 +443,7 @@ pub struct AgentPanel { pending_serialization: Option>>, onboarding: Entity, selected_agent: AgentType, + show_trust_workspace_message: bool, } impl AgentPanel { @@ -548,7 +540,6 @@ impl AgentPanel { let client = workspace.client().clone(); let workspace = workspace.weak_handle(); - let inline_assist_context_store = cx.new(|_cx| ContextStore::new(project.downgrade())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); @@ -621,11 +612,14 @@ impl AgentPanel { if let Some(panel) = panel.upgrade() { menu = Self::populate_recently_opened_menu_section(menu, panel, cx); } - menu.action("View All", Box::new(OpenHistory)) - .end_slot_action(DeleteRecentlyOpenThread.boxed_clone()) + + menu = menu + .action("View All", Box::new(OpenHistory)) .fixed_width(px(320.).into()) .keep_open_on_confirm(false) - .key_context("NavigationMenu") + .key_context("NavigationMenu"); + + menu }); weak_panel .update(cx, |panel, cx| { @@ -685,7 +679,6 @@ impl AgentPanel { configuration: None, configuration_subscription: None, context_server_registry, - inline_assist_context_store, previous_view: None, new_thread_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: PopoverMenuHandle::default(), @@ -701,6 +694,7 @@ impl AgentPanel { history_store, selected_agent: AgentType::default(), loading: false, + show_trust_workspace_message: false, }; // Initial sync of agent servers from extensions @@ -726,10 +720,6 @@ impl AgentPanel { &self.prompt_store } - pub(crate) fn inline_assist_context_store(&self) -> &Entity { - &self.inline_assist_context_store - } - pub(crate) fn thread_store(&self) -> &Entity { &self.history_store } @@ -828,10 +818,11 @@ impl AgentPanel { window, cx, ), + true, window, cx, ); - text_thread_editor.focus_handle(cx).focus(window); + text_thread_editor.focus_handle(cx).focus(window, cx); } fn external_thread( @@ -897,34 +888,21 @@ impl AgentPanel { }; let server = ext_agent.server(fs, history); + this.update_in(cx, |agent_panel, window, cx| { + agent_panel._external_thread( + server, + resume_thread, + summarize_thread, + workspace, + project, + loading, + ext_agent, + window, + cx, + ); + })?; - if !loading 
{ - telemetry::event!("Agent Thread Started", agent = server.telemetry_id()); - } - - this.update_in(cx, |this, window, cx| { - let selected_agent = ext_agent.into(); - if this.selected_agent != selected_agent { - this.selected_agent = selected_agent; - this.serialize(cx); - } - - let thread_view = cx.new(|cx| { - crate::acp::AcpThreadView::new( - server, - resume_thread, - summarize_thread, - workspace.clone(), - project, - this.history_store.clone(), - this.prompt_store.clone(), - window, - cx, - ) - }); - - this.set_active_view(ActiveView::ExternalAgentThread { thread_view }, window, cx); - }) + anyhow::Ok(()) }) .detach_and_log_err(cx); } @@ -957,7 +935,7 @@ impl AgentPanel { if let Some(thread_view) = self.active_thread_view() { thread_view.update(cx, |view, cx| { view.expand_message_editor(&ExpandMessageEditor, window, cx); - view.focus_handle(cx).focus(window); + view.focus_handle(cx).focus(window, cx); }); } } @@ -965,10 +943,10 @@ impl AgentPanel { fn open_history(&mut self, window: &mut Window, cx: &mut Context) { if matches!(self.active_view, ActiveView::History) { if let Some(previous_view) = self.previous_view.take() { - self.set_active_view(previous_view, window, cx); + self.set_active_view(previous_view, true, window, cx); } } else { - self.set_active_view(ActiveView::History, window, cx); + self.set_active_view(ActiveView::History, true, window, cx); } cx.notify(); } @@ -1024,6 +1002,7 @@ impl AgentPanel { window, cx, ), + true, window, cx, ); @@ -1037,12 +1016,12 @@ impl AgentPanel { match &self.active_view { ActiveView::ExternalAgentThread { thread_view } => { - thread_view.focus_handle(cx).focus(window); + thread_view.focus_handle(cx).focus(window, cx); } ActiveView::TextThread { text_thread_editor, .. } => { - text_thread_editor.focus_handle(cx).focus(window); + text_thread_editor.focus_handle(cx).focus(window, cx); } ActiveView::History | ActiveView::Configuration => {} } @@ -1169,7 +1148,7 @@ impl AgentPanel { let context_server_store = self.project.read(cx).context_server_store(); let fs = self.fs.clone(); - self.set_active_view(ActiveView::Configuration, window, cx); + self.set_active_view(ActiveView::Configuration, true, window, cx); self.configuration = Some(cx.new(|cx| { AgentConfiguration::new( fs, @@ -1190,7 +1169,7 @@ impl AgentPanel { Self::handle_agent_configuration_event, )); - configuration.focus_handle(cx).focus(window); + configuration.focus_handle(cx).focus(window, cx); } } @@ -1286,6 +1265,7 @@ impl AgentPanel { fn set_active_view( &mut self, new_view: ActiveView, + focus: bool, window: &mut Window, cx: &mut Context, ) { @@ -1324,7 +1304,9 @@ impl AgentPanel { self.active_view = new_view; } - self.focus_handle(cx).focus(window); + if focus { + self.focus_handle(cx).focus(window, cx); + } } fn populate_recently_opened_menu_section( @@ -1459,8 +1441,8 @@ impl AgentPanel { self.serialize(cx); self.external_thread(Some(crate::ExternalAgent::Codex), None, None, window, cx) } - AgentType::Custom { name, command } => self.external_thread( - Some(crate::ExternalAgent::Custom { name, command }), + AgentType::Custom { name } => self.external_thread( + Some(crate::ExternalAgent::Custom { name }), None, None, window, @@ -1483,6 +1465,47 @@ impl AgentPanel { cx, ); } + + fn _external_thread( + &mut self, + server: Rc, + resume_thread: Option, + summarize_thread: Option, + workspace: WeakEntity, + project: Entity, + loading: bool, + ext_agent: ExternalAgent, + window: &mut Window, + cx: &mut Context, + ) { + let selected_agent = AgentType::from(ext_agent); + if 
self.selected_agent != selected_agent { + self.selected_agent = selected_agent; + self.serialize(cx); + } + + let thread_view = cx.new(|cx| { + crate::acp::AcpThreadView::new( + server, + resume_thread, + summarize_thread, + workspace.clone(), + project, + self.history_store.clone(), + self.prompt_store.clone(), + !loading, + window, + cx, + ) + }); + + self.set_active_view( + ActiveView::ExternalAgentThread { thread_view }, + !loading, + window, + cx, + ); + } } impl Focusable for AgentPanel { @@ -1597,14 +1620,19 @@ impl AgentPanel { let content = match &self.active_view { ActiveView::ExternalAgentThread { thread_view } => { + let is_generating_title = thread_view + .read(cx) + .as_native_thread(cx) + .map_or(false, |t| t.read(cx).is_generating_title()); + if let Some(title_editor) = thread_view.read(cx).title_editor() { - div() + let container = div() .w_full() .on_action({ let thread_view = thread_view.downgrade(); move |_: &menu::Confirm, window, cx| { if let Some(thread_view) = thread_view.upgrade() { - thread_view.focus_handle(cx).focus(window); + thread_view.focus_handle(cx).focus(window, cx); } } }) @@ -1612,12 +1640,25 @@ impl AgentPanel { let thread_view = thread_view.downgrade(); move |_: &editor::actions::Cancel, window, cx| { if let Some(thread_view) = thread_view.upgrade() { - thread_view.focus_handle(cx).focus(window); + thread_view.focus_handle(cx).focus(window, cx); } } }) - .child(title_editor) - .into_any_element() + .child(title_editor); + + if is_generating_title { + container + .with_animation( + "generating_title", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |div, delta| div.opacity(delta), + ) + .into_any_element() + } else { + container.into_any_element() + } } else { Label::new(thread_view.read(cx).title(cx)) .color(Color::Muted) @@ -1647,6 +1688,13 @@ impl AgentPanel { Label::new(LOADING_SUMMARY_PLACEHOLDER) .truncate() .color(Color::Muted) + .with_animation( + "generating_title", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.alpha(delta), + ) .into_any_element() } } @@ -1690,6 +1738,25 @@ impl AgentPanel { .into_any() } + fn handle_regenerate_thread_title(thread_view: Entity, cx: &mut App) { + thread_view.update(cx, |thread_view, cx| { + if let Some(thread) = thread_view.as_native_thread(cx) { + thread.update(cx, |thread, cx| { + thread.generate_title(cx); + }); + } + }); + } + + fn handle_regenerate_text_thread_title( + text_thread_editor: Entity, + cx: &mut App, + ) { + text_thread_editor.update(cx, |text_thread_editor, cx| { + text_thread_editor.regenerate_summary(cx); + }); + } + fn render_panel_options_menu( &self, window: &mut Window, @@ -1709,6 +1776,35 @@ impl AgentPanel { let selected_agent = self.selected_agent.clone(); + let text_thread_view = match &self.active_view { + ActiveView::TextThread { + text_thread_editor, .. + } => Some(text_thread_editor.clone()), + _ => None, + }; + let text_thread_with_messages = match &self.active_view { + ActiveView::TextThread { + text_thread_editor, .. 
+ } => text_thread_editor + .read(cx) + .text_thread() + .read(cx) + .messages(cx) + .any(|message| message.role == language_model::Role::Assistant), + _ => false, + }; + + let thread_view = match &self.active_view { + ActiveView::ExternalAgentThread { thread_view } => Some(thread_view.clone()), + _ => None, + }; + let thread_with_messages = match &self.active_view { + ActiveView::ExternalAgentThread { thread_view } => { + thread_view.read(cx).has_user_submitted_prompt(cx) + } + _ => false, + }; + PopoverMenu::new("agent-options-menu") .trigger_with_tooltip( IconButton::new("agent-options-menu", IconName::Ellipsis) @@ -1731,6 +1827,7 @@ impl AgentPanel { move |window, cx| { Some(ContextMenu::build(window, cx, |mut menu, _window, _| { menu = menu.context(focus_handle.clone()); + if let Some(usage) = usage { menu = menu .header_with_link("Prompt Usage", "Manage", account_url.clone()) @@ -1768,6 +1865,38 @@ impl AgentPanel { .separator() } + if thread_with_messages | text_thread_with_messages { + menu = menu.header("Current Thread"); + + if let Some(text_thread_view) = text_thread_view.as_ref() { + menu = menu + .entry("Regenerate Thread Title", None, { + let text_thread_view = text_thread_view.clone(); + move |_, cx| { + Self::handle_regenerate_text_thread_title( + text_thread_view.clone(), + cx, + ); + } + }) + .separator(); + } + + if let Some(thread_view) = thread_view.as_ref() { + menu = menu + .entry("Regenerate Thread Title", None, { + let thread_view = thread_view.clone(); + move |_, cx| { + Self::handle_regenerate_thread_title( + thread_view.clone(), + cx, + ); + } + }) + .separator(); + } + } + menu = menu .header("MCP Servers") .action( @@ -1857,14 +1986,17 @@ impl AgentPanel { let agent_server_store = self.project.read(cx).agent_server_store().clone(); let focus_handle = self.focus_handle(cx); - // Get custom icon path for selected agent before building menu (to avoid borrow issues) - let selected_agent_custom_icon = + let (selected_agent_custom_icon, selected_agent_label) = if let AgentType::Custom { name, .. 
} = &self.selected_agent { - agent_server_store - .read(cx) - .agent_icon(&ExternalAgentServerName(name.clone())) + let store = agent_server_store.read(cx); + let icon = store.agent_icon(&ExternalAgentServerName(name.clone())); + + let label = store + .agent_display_name(&ExternalAgentServerName(name.clone())) + .unwrap_or_else(|| self.selected_agent.label()); + (icon, label) } else { - None + (None, self.selected_agent.label()) }; let active_thread = match &self.active_view { @@ -1892,6 +2024,9 @@ impl AgentPanel { .anchor(Corner::TopRight) .with_handle(self.new_thread_menu_handle.clone()) .menu({ + let selected_agent = self.selected_agent.clone(); + let is_agent_selected = move |agent_type: AgentType| selected_agent == agent_type; + let workspace = self.workspace.clone(); let is_via_collab = workspace .update(cx, |workspace, cx| { @@ -1905,7 +2040,6 @@ impl AgentPanel { let active_thread = active_thread.clone(); Some(ContextMenu::build(window, cx, |menu, _window, cx| { menu.context(focus_handle.clone()) - .header("Zed Agent") .when_some(active_thread, |this, active_thread| { let thread = active_thread.read(cx); @@ -1929,9 +2063,11 @@ impl AgentPanel { } }) .item( - ContextMenuEntry::new("New Thread") - .action(NewThread.boxed_clone()) - .icon(IconName::Thread) + ContextMenuEntry::new("Zed Agent") + .when(is_agent_selected(AgentType::NativeAgent) | is_agent_selected(AgentType::TextThread) , |this| { + this.action(Box::new(NewExternalAgentThread { agent: None })) + }) + .icon(IconName::ZedAgent) .icon_color(Color::Muted) .handler({ let workspace = workspace.clone(); @@ -1955,10 +2091,10 @@ impl AgentPanel { }), ) .item( - ContextMenuEntry::new("New Text Thread") + ContextMenuEntry::new("Text Thread") + .action(NewTextThread.boxed_clone()) .icon(IconName::TextThread) .icon_color(Color::Muted) - .action(NewTextThread.boxed_clone()) .handler({ let workspace = workspace.clone(); move |window, cx| { @@ -1983,7 +2119,10 @@ impl AgentPanel { .separator() .header("External Agents") .item( - ContextMenuEntry::new("New Claude Code") + ContextMenuEntry::new("Claude Code") + .when(is_agent_selected(AgentType::ClaudeCode), |this| { + this.action(Box::new(NewExternalAgentThread { agent: None })) + }) .icon(IconName::AiClaude) .disabled(is_via_collab) .icon_color(Color::Muted) @@ -2009,7 +2148,10 @@ impl AgentPanel { }), ) .item( - ContextMenuEntry::new("New Codex CLI") + ContextMenuEntry::new("Codex CLI") + .when(is_agent_selected(AgentType::Codex), |this| { + this.action(Box::new(NewExternalAgentThread { agent: None })) + }) .icon(IconName::AiOpenAi) .disabled(is_via_collab) .icon_color(Color::Muted) @@ -2035,7 +2177,10 @@ impl AgentPanel { }), ) .item( - ContextMenuEntry::new("New Gemini CLI") + ContextMenuEntry::new("Gemini CLI") + .when(is_agent_selected(AgentType::Gemini), |this| { + this.action(Box::new(NewExternalAgentThread { agent: None })) + }) .icon(IconName::AiGemini) .icon_color(Color::Muted) .disabled(is_via_collab) @@ -2061,8 +2206,8 @@ impl AgentPanel { }), ) .map(|mut menu| { - let agent_server_store_read = agent_server_store.read(cx); - let agent_names = agent_server_store_read + let agent_server_store = agent_server_store.read(cx); + let agent_names = agent_server_store .external_agents() .filter(|name| { name.0 != GEMINI_NAME @@ -2071,27 +2216,34 @@ impl AgentPanel { }) .cloned() .collect::>(); - let custom_settings = cx - .global::() - .get::(None) - .custom - .clone(); + for agent_name in agent_names { - let icon_path = agent_server_store_read.agent_icon(&agent_name); - let mut 
entry = - ContextMenuEntry::new(format!("New {}", agent_name)); + let icon_path = agent_server_store.agent_icon(&agent_name); + let display_name = agent_server_store + .agent_display_name(&agent_name) + .unwrap_or_else(|| agent_name.0.clone()); + + let mut entry = ContextMenuEntry::new(display_name); + if let Some(icon_path) = icon_path { entry = entry.custom_icon_svg(icon_path); } else { - entry = entry.icon(IconName::Terminal); + entry = entry.icon(IconName::Sparkle); } entry = entry + .when( + is_agent_selected(AgentType::Custom { + name: agent_name.0.clone(), + }), + |this| { + this.action(Box::new(NewExternalAgentThread { agent: None })) + }, + ) .icon_color(Color::Muted) .disabled(is_via_collab) .handler({ let workspace = workspace.clone(); let agent_name = agent_name.clone(); - let custom_settings = custom_settings.clone(); move |window, cx| { if let Some(workspace) = workspace.upgrade() { workspace.update(cx, |workspace, cx| { @@ -2104,17 +2256,6 @@ impl AgentPanel { name: agent_name .clone() .into(), - command: custom_settings - .get(&agent_name.0) - .map(|settings| { - settings - .command - .clone() - }) - .unwrap_or( - placeholder_command( - ), - ), }, window, cx, @@ -2125,6 +2266,7 @@ impl AgentPanel { } } }); + menu = menu.item(entry); } @@ -2150,30 +2292,41 @@ impl AgentPanel { } }); - let selected_agent_label = self.selected_agent.label(); + let is_thread_loading = self + .active_thread_view() + .map(|thread| thread.read(cx).is_loading()) + .unwrap_or(false); let has_custom_icon = selected_agent_custom_icon.is_some(); + let selected_agent = div() .id("selected_agent_icon") .when_some(selected_agent_custom_icon, |this, icon_path| { - let label = selected_agent_label.clone(); - this.px(DynamicSpacing::Base02.rems(cx)) + this.px_1() .child(Icon::from_external_svg(icon_path).color(Color::Muted)) - .tooltip(move |_window, cx| { - Tooltip::with_meta(label.clone(), None, "Selected Agent", cx) - }) }) .when(!has_custom_icon, |this| { this.when_some(self.selected_agent.icon(), |this, icon| { - let label = selected_agent_label.clone(); - this.px(DynamicSpacing::Base02.rems(cx)) - .child(Icon::new(icon).color(Color::Muted)) - .tooltip(move |_window, cx| { - Tooltip::with_meta(label.clone(), None, "Selected Agent", cx) - }) + this.px_1().child(Icon::new(icon).color(Color::Muted)) }) }) - .into_any_element(); + .tooltip(move |_, cx| { + Tooltip::with_meta(selected_agent_label.clone(), None, "Selected Agent", cx) + }); + + let selected_agent = if is_thread_loading { + selected_agent + .with_animation( + "pulsating-icon", + Animation::new(Duration::from_secs(1)) + .repeat() + .with_easing(pulsating_between(0.2, 0.6)), + |icon, delta| icon.opacity(delta), + ) + .into_any_element() + } else { + selected_agent.into_any_element() + }; h_flex() .id("agent-panel-toolbar") @@ -2539,6 +2692,38 @@ impl AgentPanel { } } + fn render_workspace_trust_message(&self, cx: &Context) -> Option { + if !self.show_trust_workspace_message { + return None; + } + + let description = "To protect your system, third-party code—like MCP servers—won't run until you mark this workspace as safe."; + + Some( + Callout::new() + .icon(IconName::Warning) + .severity(Severity::Warning) + .border_position(ui::BorderPosition::Bottom) + .title("You're in Restricted Mode") + .description(description) + .actions_slot( + Button::new("open-trust-modal", "Configure Project Trust") + .label_size(LabelSize::Small) + .style(ButtonStyle::Outlined) + .on_click({ + cx.listener(move |this, _, window, cx| { + this.workspace + .update(cx, 
|workspace, cx| { + workspace + .show_worktree_trust_security_modal(true, window, cx) + }) + .log_err(); + }) + }), + ), + ) + } + fn key_context(&self) -> KeyContext { let mut key_context = KeyContext::new_with_defaults(); key_context.add("AgentPanel"); @@ -2591,6 +2776,7 @@ impl Render for AgentPanel { } })) .child(self.render_toolbar(window, cx)) + .children(self.render_workspace_trust_message(cx)) .children(self.render_onboarding(window, cx)) .map(|parent| match &self.active_view { ActiveView::ExternalAgentThread { thread_view, .. } => parent @@ -2662,27 +2848,24 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { cx: &mut Context, ) { InlineAssistant::update_global(cx, |assistant, cx| { - let Some(project) = self - .workspace - .upgrade() - .map(|workspace| workspace.read(cx).project().downgrade()) - else { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let Some(panel) = workspace.read(cx).panel::(cx) else { return; }; - let prompt_store = None; - let thread_store = None; - let context_store = cx.new(|_| ContextStore::new(project.clone())); + let project = workspace.read(cx).project().downgrade(); + let thread_store = panel.read(cx).thread_store().clone(); assistant.assist( prompt_editor, self.workspace.clone(), - context_store, project, - prompt_store, thread_store, + None, initial_prompt, window, cx, - ) + ); }) } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 781374f117d24b2265a16a5aa9260690850d10d4..02cb7e59948b10274302bd8cd6f74f1accbd30a3 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -1,17 +1,17 @@ -mod acp; +pub mod acp; mod agent_configuration; mod agent_diff; mod agent_model_selector; mod agent_panel; mod buffer_codegen; +mod completion_provider; mod context; -mod context_picker; mod context_server_configuration; -mod context_store; -mod context_strip; +mod favorite_models; mod inline_assistant; mod inline_prompt_editor; mod language_model_selector; +mod mention_set; mod profile_selector; mod slash_command; mod slash_command_picker; @@ -27,15 +27,17 @@ use agent_settings::{AgentProfileId, AgentSettings}; use assistant_slash_command::SlashCommandRegistry; use client::Client; use command_palette_hooks::CommandPaletteFilter; -use feature_flags::FeatureFlagAppExt as _; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _}; use fs::Fs; use gpui::{Action, App, Entity, SharedString, actions}; -use language::LanguageRegistry; +use language::{ + LanguageRegistry, + language_settings::{AllLanguageSettings, EditPredictionProvider}, +}; use language_model::{ - ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, + ConfiguredModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, }; use project::DisableAiSettings; -use project::agent_server_store::AgentServerCommand; use prompt_store::PromptBuilder; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -54,8 +56,6 @@ actions!( [ /// Creates a new text-based conversation thread. NewTextThread, - /// Toggles the context picker interface for adding files, symbols, or other context. - ToggleContextPicker, /// Toggles the menu to create new agent threads. ToggleNewThreadMenu, /// Toggles the navigation menu for switching between threads and views. @@ -68,10 +68,12 @@ actions!( ToggleProfileSelector, /// Cycles through available session modes. CycleModeSelector, - /// Removes all added context from the current conversation. 
- RemoveAllContext, + /// Cycles through favorited models in the ACP model selector. + CycleFavoriteModels, /// Expands the message editor to full size. ExpandMessageEditor, + /// Removes all thread history. + RemoveHistory, /// Opens the conversation history view. OpenHistory, /// Adds a context server to the configuration. @@ -92,10 +94,6 @@ actions!( FocusLeft, /// Moves focus right in the interface. FocusRight, - /// Removes the currently focused context item. - RemoveFocusedContext, - /// Accepts the suggested context item. - AcceptSuggestedContext, /// Opens the active thread as a markdown file. OpenActiveThreadAsMarkdown, /// Opens the agent diff view to review changes. @@ -159,31 +157,10 @@ pub enum ExternalAgent { ClaudeCode, Codex, NativeAgent, - Custom { - name: SharedString, - command: AgentServerCommand, - }, -} - -fn placeholder_command() -> AgentServerCommand { - AgentServerCommand { - path: "/placeholder".into(), - args: vec![], - env: None, - } + Custom { name: SharedString }, } impl ExternalAgent { - pub fn parse_built_in(server: &dyn agent_servers::AgentServer) -> Option { - match server.telemetry_id() { - "gemini-cli" => Some(Self::Gemini), - "claude-code" => Some(Self::ClaudeCode), - "codex" => Some(Self::Codex), - "zed" => Some(Self::NativeAgent), - _ => None, - } - } - pub fn server( &self, fs: Arc, @@ -194,9 +171,7 @@ impl ExternalAgent { Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode), Self::Codex => Rc::new(agent_servers::Codex), Self::NativeAgent => Rc::new(agent::NativeAgentServer::new(fs, history)), - Self::Custom { name, command: _ } => { - Rc::new(agent_servers::CustomAgentServer::new(name.clone())) - } + Self::Custom { name } => Rc::new(agent_servers::CustomAgentServer::new(name.clone())), } } } @@ -231,11 +206,6 @@ impl ModelUsageContext { } } } - - pub fn language_model(&self, cx: &App) -> Option> { - self.configured_model(cx) - .map(|configured_model| configured_model.model) - } } /// Initializes the `agent` crate. 
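The hunk above drops the `command` field from `ExternalAgent::Custom`, so the variant now only carries the agent's name and `ExternalAgent::server` builds a `CustomAgentServer` from that name alone; the launch command is resolved elsewhere (the agent server store) at spawn time rather than being stored on the enum. A minimal standalone sketch of that pattern follows. All names here (`AgentCommand`, `AgentServerStore`, `command_for`, `custom_commands`) are illustrative stand-ins, not Zed's actual types or APIs.

```rust
use std::collections::HashMap;

// Hypothetical stand-in for the real AgentServerCommand type.
#[derive(Clone, Debug)]
struct AgentCommand {
    path: String,
    args: Vec<String>,
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
enum ExternalAgent {
    Gemini,
    ClaudeCode,
    Codex,
    NativeAgent,
    // After this change the variant only identifies the agent by name;
    // its command is looked up at launch time instead of being stored here.
    Custom { name: String },
}

// Hypothetical stand-in for the agent server store that owns the custom
// agent configuration.
struct AgentServerStore {
    custom_commands: HashMap<String, AgentCommand>,
}

impl AgentServerStore {
    // Resolve the command for a custom agent by name; built-in agents
    // locate their binaries through other paths in the real code.
    fn command_for(&self, agent: &ExternalAgent) -> Option<AgentCommand> {
        match agent {
            ExternalAgent::Custom { name } => self.custom_commands.get(name).cloned(),
            _ => None,
        }
    }
}

fn main() {
    let store = AgentServerStore {
        custom_commands: HashMap::from([(
            "my-agent".to_string(),
            AgentCommand {
                path: "/usr/local/bin/my-agent".into(),
                args: vec!["--acp".into()],
            },
        )]),
    };
    let agent = ExternalAgent::Custom { name: "my-agent".into() };
    println!("{:?}", store.command_for(&agent));
}
```

One consequence of this design, visible in the rest of the diff, is that the `placeholder_command()` helper and the settings lookup in the new-thread menu become unnecessary: callers pass only the agent name and the store supplies the command when the thread is actually started.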
@@ -247,7 +217,7 @@ pub fn init( is_eval: bool, cx: &mut App, ) { - assistant_text_thread::init(client.clone(), cx); + assistant_text_thread::init(client, cx); rules_library::init(cx); if !is_eval { // Initializing the language model from the user settings messes with the eval, so we only initialize them when @@ -260,13 +230,8 @@ pub fn init( TextThreadEditor::init(cx); register_slash_commands(cx); - inline_assistant::init( - fs.clone(), - prompt_builder.clone(), - client.telemetry().clone(), - cx, - ); - terminal_inline_assistant::init(fs.clone(), prompt_builder, client.telemetry().clone(), cx); + inline_assistant::init(fs.clone(), prompt_builder.clone(), cx); + terminal_inline_assistant::init(fs.clone(), prompt_builder, cx); cx.observe_new(move |workspace, window, cx| { ConfigureContextServerModal::register(workspace, language_registry.clone(), window, cx) }) @@ -282,56 +247,93 @@ pub fn init( update_command_palette_filter(app_cx); }) .detach(); + + cx.on_flags_ready(|_, cx| { + update_command_palette_filter(cx); + }) + .detach(); } fn update_command_palette_filter(cx: &mut App) { let disable_ai = DisableAiSettings::get_global(cx).disable_ai; + let agent_enabled = AgentSettings::get_global(cx).enabled; + let agent_v2_enabled = cx.has_flag::(); + let edit_prediction_provider = AllLanguageSettings::get_global(cx) + .edit_predictions + .provider; + CommandPaletteFilter::update_global(cx, |filter, _| { + use editor::actions::{ + AcceptEditPrediction, AcceptNextLineEditPrediction, AcceptNextWordEditPrediction, + NextEditPrediction, PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction, + }; + let edit_prediction_actions = [ + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; + if disable_ai { filter.hide_namespace("agent"); + filter.hide_namespace("agents"); filter.hide_namespace("assistant"); filter.hide_namespace("copilot"); filter.hide_namespace("supermaven"); filter.hide_namespace("zed_predict_onboarding"); filter.hide_namespace("edit_prediction"); - use editor::actions::{ - AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction, - PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction, - }; - let edit_prediction_actions = [ - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - ]; filter.hide_action_types(&edit_prediction_actions); filter.hide_action_types(&[TypeId::of::()]); } else { - filter.show_namespace("agent"); + if agent_enabled { + filter.show_namespace("agent"); + filter.show_namespace("agents"); + } else { + filter.hide_namespace("agent"); + filter.hide_namespace("agents"); + } + filter.show_namespace("assistant"); - filter.show_namespace("copilot"); - filter.show_namespace("zed_predict_onboarding"); - filter.show_namespace("edit_prediction"); - - use editor::actions::{ - AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction, - PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction, - }; - let edit_prediction_actions = [ - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - ]; - filter.show_action_types(edit_prediction_actions.iter()); + match edit_prediction_provider { + EditPredictionProvider::None => { + filter.hide_namespace("edit_prediction"); + filter.hide_namespace("copilot"); + filter.hide_namespace("supermaven"); + filter.hide_action_types(&edit_prediction_actions); + } + EditPredictionProvider::Copilot 
=> { + filter.show_namespace("edit_prediction"); + filter.show_namespace("copilot"); + filter.hide_namespace("supermaven"); + filter.show_action_types(edit_prediction_actions.iter()); + } + EditPredictionProvider::Supermaven => { + filter.show_namespace("edit_prediction"); + filter.hide_namespace("copilot"); + filter.show_namespace("supermaven"); + filter.show_action_types(edit_prediction_actions.iter()); + } + EditPredictionProvider::Zed + | EditPredictionProvider::Codestral + | EditPredictionProvider::Experimental(_) => { + filter.show_namespace("edit_prediction"); + filter.hide_namespace("copilot"); + filter.hide_namespace("supermaven"); + filter.show_action_types(edit_prediction_actions.iter()); + } + } + filter.show_namespace("zed_predict_onboarding"); filter.show_action_types(&[TypeId::of::()]); + if !agent_v2_enabled { + filter.hide_action_types(&[TypeId::of::()]); + } } }); } @@ -420,3 +422,140 @@ fn register_slash_commands(cx: &mut App) { }) .detach(); } + +#[cfg(test)] +mod tests { + use super::*; + use agent_settings::{AgentProfileId, AgentSettings, CompletionMode}; + use command_palette_hooks::CommandPaletteFilter; + use editor::actions::AcceptEditPrediction; + use gpui::{BorrowAppContext, TestAppContext, px}; + use project::DisableAiSettings; + use settings::{ + DefaultAgentView, DockPosition, DockSide, NotifyWhenAgentWaiting, Settings, SettingsStore, + }; + + #[gpui::test] + fn test_agent_command_palette_visibility(cx: &mut TestAppContext) { + // Init settings + cx.update(|cx| { + let store = SettingsStore::test(cx); + cx.set_global(store); + command_palette_hooks::init(cx); + AgentSettings::register(cx); + DisableAiSettings::register(cx); + AllLanguageSettings::register(cx); + }); + + let agent_settings = AgentSettings { + enabled: true, + button: true, + dock: DockPosition::Right, + agents_panel_dock: DockSide::Left, + default_width: px(300.), + default_height: px(600.), + default_model: None, + inline_assistant_model: None, + inline_assistant_use_streaming_tools: false, + commit_message_model: None, + thread_summary_model: None, + inline_alternatives: vec![], + favorite_models: vec![], + default_profile: AgentProfileId::default(), + default_view: DefaultAgentView::Thread, + profiles: Default::default(), + always_allow_tool_actions: false, + notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), + play_sound_when_agent_done: false, + single_file_review: false, + model_parameters: vec![], + preferred_completion_mode: CompletionMode::Normal, + enable_feedback: false, + expand_edit_card: true, + expand_terminal_card: true, + use_modifier_to_send: true, + message_editor_min_lines: 1, + }; + + cx.update(|cx| { + AgentSettings::override_global(agent_settings.clone(), cx); + DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx); + + // Initial update + update_command_palette_filter(cx); + }); + + // Assert visible + cx.update(|cx| { + let filter = CommandPaletteFilter::try_global(cx).unwrap(); + assert!( + !filter.is_hidden(&NewThread), + "NewThread should be visible by default" + ); + }); + + // Disable agent + cx.update(|cx| { + let mut new_settings = agent_settings.clone(); + new_settings.enabled = false; + AgentSettings::override_global(new_settings, cx); + + // Trigger update + update_command_palette_filter(cx); + }); + + // Assert hidden + cx.update(|cx| { + let filter = CommandPaletteFilter::try_global(cx).unwrap(); + assert!( + filter.is_hidden(&NewThread), + "NewThread should be hidden when agent is disabled" + ); + }); + + // Test 
EditPredictionProvider + // Enable EditPredictionProvider::Copilot + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings(cx, |s| { + s.project + .all_languages + .features + .get_or_insert(Default::default()) + .edit_prediction_provider = Some(EditPredictionProvider::Copilot); + }); + }); + update_command_palette_filter(cx); + }); + + cx.update(|cx| { + let filter = CommandPaletteFilter::try_global(cx).unwrap(); + assert!( + !filter.is_hidden(&AcceptEditPrediction), + "EditPrediction should be visible when provider is Copilot" + ); + }); + + // Disable EditPredictionProvider (None) + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings(cx, |s| { + s.project + .all_languages + .features + .get_or_insert(Default::default()) + .edit_prediction_provider = Some(EditPredictionProvider::None); + }); + }); + update_command_palette_filter(cx); + }); + + cx.update(|cx| { + let filter = CommandPaletteFilter::try_global(cx).unwrap(); + assert!( + filter.is_hidden(&AcceptEditPrediction), + "EditPrediction should be hidden when provider is None" + ); + }); + } +} diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 51c3c5ad3cb2b89c85e3a276ff34af5cc46115f9..a296d4d20918fba6eb32bfcf7fcc657f9db2b3ac 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -1,26 +1,34 @@ -use crate::{ - context::load_context, context_store::ContextStore, inline_prompt_editor::CodegenStatus, -}; +use crate::{context::LoadedContext, inline_prompt_editor::CodegenStatus}; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; -use client::telemetry::Telemetry; +use uuid::Uuid; + use cloud_llm_client::CompletionIntent; use collections::HashSet; use editor::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint}; +use feature_flags::{FeatureFlagAppExt as _, InlineAssistantUseToolFeatureFlag}; use futures::{ - SinkExt, Stream, StreamExt, TryStreamExt as _, channel::mpsc, future::LocalBoxFuture, join, + SinkExt, Stream, StreamExt, TryStreamExt as _, + channel::mpsc, + future::{LocalBoxFuture, Shared}, + join, + stream::BoxStream, }; -use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription, Task, WeakEntity}; -use language::{Buffer, IndentKind, Point, TransactionId, line_diff}; +use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task}; +use language::{Buffer, IndentKind, LanguageName, Point, TransactionId, line_diff}; use language_model::{ - LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelTextStream, Role, report_assistant_event, + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, + LanguageModelRequestTool, LanguageModelTextStream, LanguageModelToolChoice, + LanguageModelToolUse, Role, TokenUsage, }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; -use project::Project; -use prompt_store::{PromptBuilder, PromptStore}; +use prompt_store::PromptBuilder; use rope::Rope; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::Settings as _; use smol::future::FutureExt; use std::{ cmp, @@ -33,7 +41,26 @@ use std::{ time::Instant, }; use streaming_diff::{CharOperation, LineDiff, LineOperation, StreamingDiff}; -use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; + +/// Use this tool when 
you cannot or should not make a rewrite. This includes: +/// - The user's request is unclear, ambiguous, or nonsensical +/// - The requested change cannot be made by only editing the section +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct FailureMessageInput { + /// A brief message to the user explaining why you're unable to fulfill the request or to ask a question about the request. + #[serde(default)] + pub message: String, +} + +/// Replaces text in tags with your replacement_text. +/// Only use this tool when you are confident you understand the user's request and can fulfill it +/// by editing the marked section. +#[derive(Debug, Serialize, Deserialize, JsonSchema)] +pub struct RewriteSectionInput { + /// The text to replace the section with. + #[serde(default)] + pub replacement_text: String, +} pub struct BufferCodegen { alternatives: Vec>, @@ -43,23 +70,20 @@ pub struct BufferCodegen { buffer: Entity, range: Range, initial_transaction_id: Option, - context_store: Entity, - project: WeakEntity, - prompt_store: Option>, - telemetry: Arc, builder: Arc, pub is_insertion: bool, + session_id: Uuid, } +pub const REWRITE_SECTION_TOOL_NAME: &str = "rewrite_section"; +pub const FAILURE_MESSAGE_TOOL_NAME: &str = "failure_message"; + impl BufferCodegen { pub fn new( buffer: Entity, range: Range, initial_transaction_id: Option, - context_store: Entity, - project: WeakEntity, - prompt_store: Option>, - telemetry: Arc, + session_id: Uuid, builder: Arc, cx: &mut Context, ) -> Self { @@ -68,11 +92,8 @@ impl BufferCodegen { buffer.clone(), range.clone(), false, - Some(context_store.clone()), - project.clone(), - prompt_store.clone(), - Some(telemetry.clone()), builder.clone(), + session_id, cx, ) }); @@ -85,11 +106,8 @@ impl BufferCodegen { buffer, range, initial_transaction_id, - context_store, - project, - prompt_store, - telemetry, builder, + session_id, }; this.activate(0, cx); this @@ -104,10 +122,18 @@ impl BufferCodegen { .push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event))); } + pub fn active_completion(&self, cx: &App) -> Option { + self.active_alternative().read(cx).current_completion() + } + pub fn active_alternative(&self) -> &Entity { &self.alternatives[self.active_alternative] } + pub fn language_name(&self, cx: &App) -> Option { + self.active_alternative().read(cx).language_name(cx) + } + pub fn status<'a>(&self, cx: &'a App) -> &'a CodegenStatus { &self.active_alternative().read(cx).status } @@ -148,6 +174,7 @@ impl BufferCodegen { &mut self, primary_model: Arc, user_prompt: String, + context_task: Shared>>, cx: &mut Context, ) -> Result<()> { let alternative_models = LanguageModelRegistry::read_global(cx) @@ -165,11 +192,8 @@ impl BufferCodegen { self.buffer.clone(), self.range.clone(), false, - Some(self.context_store.clone()), - self.project.clone(), - self.prompt_store.clone(), - Some(self.telemetry.clone()), self.builder.clone(), + self.session_id, cx, ) })); @@ -180,7 +204,7 @@ impl BufferCodegen { .zip(&self.alternatives) { alternative.update(cx, |alternative, cx| { - alternative.start(user_prompt.clone(), model.clone(), cx) + alternative.start(user_prompt.clone(), context_task.clone(), model.clone(), cx) })?; } @@ -228,6 +252,14 @@ impl BufferCodegen { pub fn last_equal_ranges<'a>(&self, cx: &'a App) -> &'a [Range] { self.active_alternative().read(cx).last_equal_ranges() } + + pub fn selected_text<'a>(&self, cx: &'a App) -> Option<&'a str> { + self.active_alternative().read(cx).selected_text() + } + + pub fn session_id(&self) -> Uuid { + 
self.session_id + } } impl EventEmitter for BufferCodegen {} @@ -243,10 +275,6 @@ pub struct CodegenAlternative { status: CodegenStatus, generation: Task<()>, diff: Diff, - context_store: Option>, - project: WeakEntity, - prompt_store: Option>, - telemetry: Option>, _subscription: gpui::Subscription, builder: Arc, active: bool, @@ -254,7 +282,11 @@ pub struct CodegenAlternative { line_operations: Vec, elapsed_time: Option, completion: Option, + selected_text: Option, pub message_id: Option, + session_id: Uuid, + pub description: Option, + pub failure: Option, } impl EventEmitter for CodegenAlternative {} @@ -264,11 +296,8 @@ impl CodegenAlternative { buffer: Entity, range: Range, active: bool, - context_store: Option>, - project: WeakEntity, - prompt_store: Option>, - telemetry: Option>, builder: Arc, + session_id: Uuid, cx: &mut Context, ) -> Self { let snapshot = buffer.read(cx).snapshot(cx); @@ -291,7 +320,7 @@ impl CodegenAlternative { let mut buffer = Buffer::local_normalized(text, line_ending, cx); buffer.set_language(language, cx); if let Some(language_registry) = language_registry { - buffer.set_language_registry(language_registry) + buffer.set_language_registry(language_registry); } buffer }); @@ -307,21 +336,28 @@ impl CodegenAlternative { status: CodegenStatus::Idle, generation: Task::ready(()), diff: Diff::default(), - context_store, - project, - prompt_store, - telemetry, - _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), builder, - active, + active: active, edits: Vec::new(), line_operations: Vec::new(), range, elapsed_time: None, completion: None, + selected_text: None, + session_id, + description: None, + failure: None, + _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), } } + pub fn language_name(&self, cx: &App) -> Option { + self.old_buffer + .read(cx) + .language() + .map(|language| language.name()) + } + pub fn set_active(&mut self, active: bool, cx: &mut Context) { if active != self.active { self.active = active; @@ -363,12 +399,22 @@ impl CodegenAlternative { &self.last_equal_ranges } + pub fn use_streaming_tools(model: &dyn LanguageModel, cx: &App) -> bool { + model.supports_streaming_tools() + && cx.has_flag::() + && AgentSettings::get_global(cx).inline_assistant_use_streaming_tools + } + pub fn start( &mut self, user_prompt: String, + context_task: Shared>>, model: Arc, cx: &mut Context, ) -> Result<()> { + // Clear the model explanation since the user has started a new generation. + self.description = None; + if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() { self.buffer.update(cx, |buffer, cx| { buffer.undo_transaction(transformation_transaction_id, cx); @@ -377,27 +423,39 @@ impl CodegenAlternative { self.edit_position = Some(self.range.start.bias_right(&self.snapshot)); - let api_key = model.api_key(cx); - let telemetry_id = model.telemetry_id(); - let provider_id = model.provider_id(); - let stream: LocalBoxFuture> = - if user_prompt.trim().to_lowercase() == "delete" { - async { Ok(LanguageModelTextStream::default()) }.boxed_local() - } else { - let request = self.build_request(&model, user_prompt, cx)?; - cx.spawn(async move |_, cx| { - Ok(model.stream_completion_text(request.await, cx).await?) 
- }) - .boxed_local() - }; - self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx); + if Self::use_streaming_tools(model.as_ref(), cx) { + let request = self.build_request(&model, user_prompt, context_task, cx)?; + let completion_events = cx.spawn({ + let model = model.clone(); + async move |_, cx| model.stream_completion(request.await, cx).await + }); + self.generation = self.handle_completion(model, completion_events, cx); + } else { + let stream: LocalBoxFuture> = + if user_prompt.trim().to_lowercase() == "delete" { + async { Ok(LanguageModelTextStream::default()) }.boxed_local() + } else { + let request = self.build_request(&model, user_prompt, context_task, cx)?; + cx.spawn({ + let model = model.clone(); + async move |_, cx| { + Ok(model.stream_completion_text(request.await, cx).await?) + } + }) + .boxed_local() + }; + self.generation = + self.handle_stream(model, /* strip_invalid_spans: */ true, stream, cx); + } + Ok(()) } - fn build_request( + fn build_request_tools( &self, model: &Arc, user_prompt: String, + context_task: Shared>>, cx: &mut App, ) -> Result> { let buffer = self.buffer.read(cx).snapshot(cx); @@ -427,23 +485,119 @@ impl CodegenAlternative { anyhow::bail!("invalid transformation range"); }; - let prompt = self + let system_prompt = self .builder - .generate_inline_transformation_prompt(user_prompt, language_name, buffer, range) + .generate_inline_transformation_prompt_tools( + language_name, + buffer, + range.start.0..range.end.0, + ) .context("generating content prompt")?; - let context_task = self.context_store.as_ref().and_then(|context_store| { - if let Some(project) = self.project.upgrade() { - let context = context_store - .read(cx) - .context() - .cloned() - .collect::>(); - Some(load_context(context, &project, &self.prompt_store, cx)) - } else { + let temperature = AgentSettings::temperature_for_model(model, cx); + + let tool_input_format = model.tool_input_format(); + let tool_choice = model + .supports_tool_choice(LanguageModelToolChoice::Any) + .then_some(LanguageModelToolChoice::Any); + + Ok(cx.spawn(async move |_cx| { + let mut messages = vec![LanguageModelRequestMessage { + role: Role::System, + content: vec![system_prompt.into()], + cache: false, + reasoning_details: None, + }]; + + let mut user_message = LanguageModelRequestMessage { + role: Role::User, + content: Vec::new(), + cache: false, + reasoning_details: None, + }; + + if let Some(context) = context_task.await { + context.add_to_request_message(&mut user_message); + } + + user_message.content.push(user_prompt.into()); + messages.push(user_message); + + let tools = vec![ + LanguageModelRequestTool { + name: REWRITE_SECTION_TOOL_NAME.to_string(), + description: "Replaces text in tags with your replacement_text.".to_string(), + input_schema: language_model::tool_schema::root_schema_for::(tool_input_format).to_value(), + }, + LanguageModelRequestTool { + name: FAILURE_MESSAGE_TOOL_NAME.to_string(), + description: "Use this tool to provide a message to the user when you're unable to complete a task.".to_string(), + input_schema: language_model::tool_schema::root_schema_for::(tool_input_format).to_value(), + }, + ]; + + LanguageModelRequest { + thread_id: None, + prompt_id: None, + intent: Some(CompletionIntent::InlineAssist), + mode: None, + tools, + tool_choice, + stop: Vec::new(), + temperature, + messages, + thinking_allowed: false, + } + })) + } + + fn build_request( + &self, + model: &Arc, + user_prompt: String, + context_task: Shared>>, + cx: &mut App, + ) -> Result> { 
+ if Self::use_streaming_tools(model.as_ref(), cx) { + return self.build_request_tools(model, user_prompt, context_task, cx); + } + + let buffer = self.buffer.read(cx).snapshot(cx); + let language = buffer.language_at(self.range.start); + let language_name = if let Some(language) = language.as_ref() { + if Arc::ptr_eq(language, &language::PLAIN_TEXT) { None + } else { + Some(language.name()) } - }); + } else { + None + }; + + let language_name = language_name.as_ref(); + let start = buffer.point_to_buffer_offset(self.range.start); + let end = buffer.point_to_buffer_offset(self.range.end); + let (buffer, range) = if let Some((start, end)) = start.zip(end) { + let (start_buffer, start_buffer_offset) = start; + let (end_buffer, end_buffer_offset) = end; + if start_buffer.remote_id() == end_buffer.remote_id() { + (start_buffer.clone(), start_buffer_offset..end_buffer_offset) + } else { + anyhow::bail!("invalid transformation range"); + } + } else { + anyhow::bail!("invalid transformation range"); + }; + + let prompt = self + .builder + .generate_inline_transformation_prompt( + user_prompt, + language_name, + buffer, + range.start.0..range.end.0, + ) + .context("generating content prompt")?; let temperature = AgentSettings::temperature_for_model(model, cx); @@ -452,12 +606,11 @@ impl CodegenAlternative { role: Role::User, content: Vec::new(), cache: false, + reasoning_details: None, }; - if let Some(context_task) = context_task { - context_task - .await - .add_to_request_message(&mut request_message); + if let Some(context) = context_task.await { + context.add_to_request_message(&mut request_message); } request_message.content.push(prompt.into()); @@ -479,18 +632,31 @@ impl CodegenAlternative { pub fn handle_stream( &mut self, - model_telemetry_id: String, - model_provider_id: String, - model_api_key: Option, + model: Arc, + strip_invalid_spans: bool, stream: impl 'static + Future>, cx: &mut Context, - ) { + ) -> Task<()> { + let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx); + let session_id = self.session_id; + let model_telemetry_id = model.telemetry_id(); + let model_provider_id = model.provider_id().to_string(); let start_time = Instant::now(); + + // Make a new snapshot and re-resolve anchor in case the document was modified. 
+ // This can happen often if the editor loses focus and is saved + reformatted, + // as in https://github.com/zed-industries/zed/issues/39088 + self.snapshot = self.buffer.read(cx).snapshot(cx); + self.range = self.snapshot.anchor_after(self.range.start) + ..self.snapshot.anchor_after(self.range.end); + let snapshot = self.snapshot.clone(); let selected_text = snapshot .text_for_range(self.range.start..self.range.end) .collect::(); + self.selected_text = Some(selected_text.to_string()); + let selection_start = self.range.start.to_point(&snapshot); // Start with the indentation of the first line in the selection @@ -512,8 +678,6 @@ impl CodegenAlternative { } } - let http_client = cx.http_client(); - let telemetry = self.telemetry.clone(); let language_name = { let multibuffer = self.buffer.read(cx); let snapshot = multibuffer.snapshot(cx); @@ -530,8 +694,10 @@ impl CodegenAlternative { let completion = Arc::new(Mutex::new(String::new())); let completion_clone = completion.clone(); - self.generation = cx.spawn(async move |codegen, cx| { + cx.notify(); + cx.spawn(async move |codegen, cx| { let stream = stream.await; + let token_usage = stream .as_ref() .ok() @@ -544,17 +710,25 @@ impl CodegenAlternative { let model_telemetry_id = model_telemetry_id.clone(); let model_provider_id = model_provider_id.clone(); let (mut diff_tx, mut diff_rx) = mpsc::channel(1); - let executor = cx.background_executor().clone(); let message_id = message_id.clone(); - let line_based_stream_diff: Task> = - cx.background_spawn(async move { + let line_based_stream_diff: Task> = cx.background_spawn({ + let anthropic_reporter = anthropic_reporter.clone(); + let language_name = language_name.clone(); + async move { let mut response_latency = None; let request_start = Instant::now(); let diff = async { - let chunks = StripInvalidSpans::new( - stream?.stream.map_err(|error| error.into()), - ); - futures::pin_mut!(chunks); + let raw_stream = stream?.stream.map_err(|error| error.into()); + + let stripped; + let mut chunks: Pin> + Send>> = + if strip_invalid_spans { + stripped = StripInvalidSpans::new(raw_stream); + Box::pin(stripped) + } else { + Box::pin(raw_stream) + }; + let mut diff = StreamingDiff::new(selected_text.to_string()); let mut line_diff = LineDiff::default(); @@ -643,27 +817,30 @@ impl CodegenAlternative { let result = diff.await; let error_message = result.as_ref().err().map(|error| error.to_string()); - report_assistant_event( - AssistantEventData { - conversation_id: None, - message_id, - kind: AssistantKind::Inline, - phase: AssistantPhase::Response, - model: model_telemetry_id, - model_provider: model_provider_id, - response_latency, - error_message, - language_name: language_name.map(|name| name.to_proto()), - }, - telemetry, - http_client, - model_api_key, - &executor, + telemetry::event!( + "Assistant Responded", + kind = "inline", + phase = "response", + session_id = session_id.to_string(), + model = model_telemetry_id, + model_provider = model_provider_id, + language_name = language_name.as_ref().map(|n| n.to_string()), + message_id = message_id.as_deref(), + response_latency = response_latency, + error_message = error_message.as_deref(), ); + anthropic_reporter.report(language_model::AnthropicEventData { + completion_type: language_model::AnthropicCompletionType::Editor, + event: language_model::AnthropicEventType::Response, + language_name: language_name.map(|n| n.to_string()), + message_id, + }); + result?; Ok(()) - }); + } + }); while let Some((char_ops, line_ops)) = diff_rx.next().await { 
codegen.update(cx, |codegen, cx| { @@ -741,12 +918,30 @@ impl CodegenAlternative { output_tokens = usage.output_tokens, ) } + cx.emit(CodegenEvent::Finished); cx.notify(); }) .ok(); - }); - cx.notify(); + }) + } + + pub fn current_completion(&self) -> Option { + self.completion.clone() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn current_description(&self) -> Option { + self.description.clone() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn current_failure(&self) -> Option { + self.failure.clone() + } + + pub fn selected_text(&self) -> Option<&str> { + self.selected_text.as_deref() } pub fn stop(&mut self, cx: &mut Context) { @@ -920,6 +1115,224 @@ impl CodegenAlternative { .ok(); }) } + + fn handle_completion( + &mut self, + model: Arc, + completion_stream: Task< + Result< + BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + >, + cx: &mut Context, + ) -> Task<()> { + self.diff = Diff::default(); + self.status = CodegenStatus::Pending; + + cx.notify(); + // Leaving this in generation so that STOP equivalent events are respected even + // while we're still pre-processing the completion event + cx.spawn(async move |codegen, cx| { + let finish_with_status = |status: CodegenStatus, cx: &mut AsyncApp| { + let _ = codegen.update(cx, |this, cx| { + this.status = status; + cx.emit(CodegenEvent::Finished); + cx.notify(); + }); + }; + + let mut completion_events = match completion_stream.await { + Ok(events) => events, + Err(err) => { + finish_with_status(CodegenStatus::Error(err.into()), cx); + return; + } + }; + + enum ToolUseOutput { + Rewrite { + text: String, + description: Option, + }, + Failure(String), + } + + enum ModelUpdate { + Description(String), + Failure(String), + } + + let chars_read_so_far = Arc::new(Mutex::new(0usize)); + let process_tool_use = move |tool_use: LanguageModelToolUse| -> Option { + let mut chars_read_so_far = chars_read_so_far.lock(); + match tool_use.name.as_ref() { + REWRITE_SECTION_TOOL_NAME => { + let Ok(input) = + serde_json::from_value::(tool_use.input) + else { + return None; + }; + let text = input.replacement_text[*chars_read_so_far..].to_string(); + *chars_read_so_far = input.replacement_text.len(); + Some(ToolUseOutput::Rewrite { + text, + description: None, + }) + } + FAILURE_MESSAGE_TOOL_NAME => { + let Ok(mut input) = + serde_json::from_value::(tool_use.input) + else { + return None; + }; + Some(ToolUseOutput::Failure(std::mem::take(&mut input.message))) + } + _ => None, + } + }; + + let (message_tx, mut message_rx) = futures::channel::mpsc::unbounded::(); + + cx.spawn({ + let codegen = codegen.clone(); + async move |cx| { + while let Some(update) = message_rx.next().await { + let _ = codegen.update(cx, |this, _cx| match update { + ModelUpdate::Description(d) => this.description = Some(d), + ModelUpdate::Failure(f) => this.failure = Some(f), + }); + } + } + }) + .detach(); + + let mut message_id = None; + let mut first_text = None; + let last_token_usage = Arc::new(Mutex::new(TokenUsage::default())); + let total_text = Arc::new(Mutex::new(String::new())); + + loop { + if let Some(first_event) = completion_events.next().await { + match first_event { + Ok(LanguageModelCompletionEvent::StartMessage { message_id: id }) => { + message_id = Some(id); + } + Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => { + if let Some(output) = process_tool_use(tool_use) { + let (text, update) = match output { + ToolUseOutput::Rewrite { text, description } => { + (Some(text), 
description.map(ModelUpdate::Description)) + } + ToolUseOutput::Failure(message) => { + (None, Some(ModelUpdate::Failure(message))) + } + }; + if let Some(update) = update { + let _ = message_tx.unbounded_send(update); + } + first_text = text; + if first_text.is_some() { + break; + } + } + } + Ok(LanguageModelCompletionEvent::UsageUpdate(token_usage)) => { + *last_token_usage.lock() = token_usage; + } + Ok(LanguageModelCompletionEvent::Text(text)) => { + let mut lock = total_text.lock(); + lock.push_str(&text); + } + Ok(e) => { + log::warn!("Unexpected event: {:?}", e); + break; + } + Err(e) => { + finish_with_status(CodegenStatus::Error(e.into()), cx); + break; + } + } + } + } + + let Some(first_text) = first_text else { + finish_with_status(CodegenStatus::Done, cx); + return; + }; + + let move_last_token_usage = last_token_usage.clone(); + + let text_stream = Box::pin(futures::stream::once(async { Ok(first_text) }).chain( + completion_events.filter_map(move |e| { + let process_tool_use = process_tool_use.clone(); + let last_token_usage = move_last_token_usage.clone(); + let total_text = total_text.clone(); + let mut message_tx = message_tx.clone(); + async move { + match e { + Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => { + let Some(output) = process_tool_use(tool_use) else { + return None; + }; + let (text, update) = match output { + ToolUseOutput::Rewrite { text, description } => { + (Some(text), description.map(ModelUpdate::Description)) + } + ToolUseOutput::Failure(message) => { + (None, Some(ModelUpdate::Failure(message))) + } + }; + if let Some(update) = update { + let _ = message_tx.send(update).await; + } + text.map(Ok) + } + Ok(LanguageModelCompletionEvent::UsageUpdate(token_usage)) => { + *last_token_usage.lock() = token_usage; + None + } + Ok(LanguageModelCompletionEvent::Text(text)) => { + let mut lock = total_text.lock(); + lock.push_str(&text); + None + } + Ok(LanguageModelCompletionEvent::Stop(_reason)) => None, + e => { + log::error!("UNEXPECTED EVENT {:?}", e); + None + } + } + } + }), + )); + + let language_model_text_stream = LanguageModelTextStream { + message_id: message_id, + stream: text_stream, + last_token_usage, + }; + + let Some(task) = codegen + .update(cx, move |codegen, cx| { + codegen.handle_stream( + model, + /* strip_invalid_spans: */ false, + async { Ok(language_model_text_stream) }, + cx, + ) + }) + .ok() + else { + return; + }; + + task.await; + }) + } } #[derive(Copy, Clone, Debug)] @@ -1075,15 +1488,19 @@ impl Diff { #[cfg(test)] mod tests { use super::*; - use fs::FakeFs; use futures::{ Stream, stream::{self}, }; use gpui::TestAppContext; use indoc::indoc; - use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust}; - use language_model::{LanguageModelRegistry, TokenUsage}; + use language::{Buffer, Point}; + use language_model::fake_provider::FakeLanguageModel; + use language_model::{ + LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRegistry, + LanguageModelToolUse, StopReason, TokenUsage, + }; + use languages::rust_lang; use rand::prelude::*; use settings::SettingsStore; use std::{future, sync::Arc}; @@ -1100,25 +1517,20 @@ mod tests { } } "}; - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let range = buffer.read_with(cx, |buffer, cx| { let snapshot = buffer.snapshot(cx); 
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5)) }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, vec![], cx).await; let codegen = cx.new(|cx| { CodegenAlternative::new( buffer.clone(), range.clone(), true, - None, - project.downgrade(), - None, - None, prompt_builder, + Uuid::new_v4(), cx, ) }); @@ -1167,25 +1579,20 @@ mod tests { le } "}; - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let range = buffer.read_with(cx, |buffer, cx| { let snapshot = buffer.snapshot(cx); snapshot.anchor_before(Point::new(1, 6))..snapshot.anchor_after(Point::new(1, 6)) }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, vec![], cx).await; let codegen = cx.new(|cx| { CodegenAlternative::new( buffer.clone(), range.clone(), true, - None, - project.downgrade(), - None, - None, prompt_builder, + Uuid::new_v4(), cx, ) }); @@ -1236,25 +1643,20 @@ mod tests { " \n", "}\n" // ); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let range = buffer.read_with(cx, |buffer, cx| { let snapshot = buffer.snapshot(cx); snapshot.anchor_before(Point::new(1, 2))..snapshot.anchor_after(Point::new(1, 2)) }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, vec![], cx).await; let codegen = cx.new(|cx| { CodegenAlternative::new( buffer.clone(), range.clone(), true, - None, - project.downgrade(), - None, - None, prompt_builder, + Uuid::new_v4(), cx, ) }); @@ -1312,18 +1714,13 @@ mod tests { snapshot.anchor_before(Point::new(0, 0))..snapshot.anchor_after(Point::new(4, 2)) }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, vec![], cx).await; let codegen = cx.new(|cx| { CodegenAlternative::new( buffer.clone(), range.clone(), true, - None, - project.downgrade(), - None, - None, prompt_builder, + Uuid::new_v4(), cx, ) }); @@ -1362,25 +1759,20 @@ mod tests { let x = 0; } "}; - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let range = buffer.read_with(cx, |buffer, cx| { let snapshot = buffer.snapshot(cx); snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 14)) }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let fs = FakeFs::new(cx.executor()); - let project = Project::test(fs, vec![], cx).await; let codegen = cx.new(|cx| { CodegenAlternative::new( buffer.clone(), range.clone(), false, - None, - project.downgrade(), - None, - None, prompt_builder, + Uuid::new_v4(), cx, ) }); @@ -1419,6 +1811,51 @@ mod tests { ); } + // When not streaming tool calls, we strip backticks as part of parsing the model's + // plain text response. This is a regression test for a bug where we stripped + // backticks incorrectly. 
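+    // Note (added commentary, not part of the original patch): the replacement text used
+    // below contains inline backticks and a markdown-style link. The test streams one
+    // partial and one complete rewrite tool use via the `rewrite_tool_use` helper and then
+    // asserts the buffer is byte-for-byte identical to the input, i.e. tool-based
+    // completions do not get backtick stripping applied.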
+ #[gpui::test] + async fn test_allows_model_to_output_backticks(cx: &mut TestAppContext) { + init_test(cx); + let text = "- Improved; `cmd+click` behavior. Now requires `cmd` to be pressed before the click starts or it doesn't run. ([#44579](https://github.com/zed-industries/zed/pull/44579); thanks [Zachiah](https://github.com/Zachiah))"; + let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(0, 0))..snapshot.anchor_after(Point::new(0, 0)) + }); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let codegen = cx.new(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + true, + prompt_builder, + Uuid::new_v4(), + cx, + ) + }); + + let events_tx = simulate_tool_based_completion(&codegen, cx); + let chunk_len = text.find('`').unwrap(); + events_tx + .unbounded_send(rewrite_tool_use("tool_1", &text[..chunk_len], false)) + .unwrap(); + events_tx + .unbounded_send(rewrite_tool_use("tool_2", &text, true)) + .unwrap(); + events_tx + .unbounded_send(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)) + .unwrap(); + drop(events_tx); + cx.run_until_parked(); + + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + } + #[gpui::test] async fn test_strip_invalid_spans_from_codeblock() { assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await; @@ -1469,11 +1906,11 @@ mod tests { cx: &mut TestAppContext, ) -> mpsc::UnboundedSender { let (chunks_tx, chunks_rx) = mpsc::unbounded(); + let model = Arc::new(FakeLanguageModel::default()); codegen.update(cx, |codegen, cx| { - codegen.handle_stream( - String::new(), - String::new(), - None, + codegen.generation = codegen.handle_stream( + model, + /* strip_invalid_spans: */ false, future::ready(Ok(LanguageModelTextStream { message_id: None, stream: chunks_rx.map(Ok).boxed(), @@ -1485,26 +1922,38 @@ mod tests { chunks_tx } - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_indents_query( - r#" - (call_expression) @indent - (field_expression) @indent - (_ "(" ")" @end) @indent - (_ "{" "}" @end) @indent - "#, - ) - .unwrap() + fn simulate_tool_based_completion( + codegen: &Entity, + cx: &mut TestAppContext, + ) -> mpsc::UnboundedSender { + let (events_tx, events_rx) = mpsc::unbounded(); + let model = Arc::new(FakeLanguageModel::default()); + codegen.update(cx, |codegen, cx| { + let completion_stream = Task::ready(Ok(events_rx.map(Ok).boxed() + as BoxStream< + 'static, + Result, + >)); + codegen.generation = codegen.handle_completion(model, completion_stream, cx); + }); + events_tx + } + + fn rewrite_tool_use( + id: &str, + replacement_text: &str, + is_complete: bool, + ) -> LanguageModelCompletionEvent { + let input = RewriteSectionInput { + replacement_text: replacement_text.into(), + }; + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + id: id.into(), + name: REWRITE_SECTION_TOOL_NAME.into(), + raw_input: serde_json::to_string(&input).unwrap(), + input: serde_json::to_value(&input).unwrap(), + is_input_complete: is_complete, + thought_signature: None, + }) } } diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs 
similarity index 55% rename from crates/agent_ui/src/acp/completion_provider.rs rename to crates/agent_ui/src/completion_provider.rs index 84d75ebe4133b3145b892eec659867b137bce2f0..206a2b3282b5471e8d5e8d18788519c3853dca55 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -1,41 +1,133 @@ -use std::cell::RefCell; +use std::cmp::Reverse; use std::ops::Range; use std::path::PathBuf; -use std::rc::Rc; use std::sync::Arc; use std::sync::atomic::AtomicBool; use acp_thread::MentionUri; use agent::{HistoryEntry, HistoryStore}; -use agent_client_protocol as acp; use anyhow::Result; -use editor::{CompletionProvider, Editor, ExcerptId}; -use fuzzy::{StringMatch, StringMatchCandidate}; +use editor::{ + CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH, +}; +use fuzzy::{PathMatch, StringMatch, StringMatchCandidate}; use gpui::{App, Entity, Task, WeakEntity}; use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId}; use lsp::CompletionContext; +use ordered_float::OrderedFloat; use project::lsp_store::{CompletionDocumentation, SymbolLocation}; use project::{ - Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project, - ProjectPath, Symbol, WorktreeId, + Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, + PathMatchCandidateSet, Project, ProjectPath, Symbol, WorktreeId, }; -use prompt_store::PromptStore; +use prompt_store::{PromptStore, UserPromptId}; use rope::Point; use text::{Anchor, ToPoint as _}; use ui::prelude::*; +use util::ResultExt as _; +use util::paths::PathStyle; use util::rel_path::RelPath; +use util::truncate_and_remove_front; use workspace::Workspace; use crate::AgentPanel; -use crate::acp::message_editor::MessageEditor; -use crate::context_picker::file_context_picker::{FileMatch, search_files}; -use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules}; -use crate::context_picker::symbol_context_picker::SymbolMatch; -use crate::context_picker::symbol_context_picker::search_symbols; -use crate::context_picker::thread_context_picker::search_threads; -use crate::context_picker::{ - ContextPickerAction, ContextPickerEntry, ContextPickerMode, selection_ranges, -}; +use crate::mention_set::MentionSet; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PromptContextEntry { + Mode(PromptContextType), + Action(PromptContextAction), +} + +impl PromptContextEntry { + pub fn keyword(&self) -> &'static str { + match self { + Self::Mode(mode) => mode.keyword(), + Self::Action(action) => action.keyword(), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PromptContextType { + File, + Symbol, + Fetch, + Thread, + Rules, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PromptContextAction { + AddSelections, +} + +impl PromptContextAction { + pub fn keyword(&self) -> &'static str { + match self { + Self::AddSelections => "selection", + } + } + + pub fn label(&self) -> &'static str { + match self { + Self::AddSelections => "Selection", + } + } + + pub fn icon(&self) -> IconName { + match self { + Self::AddSelections => IconName::Reader, + } + } +} + +impl TryFrom<&str> for PromptContextType { + type Error = String; + + fn try_from(value: &str) -> Result { + match value { + "file" => Ok(Self::File), + "symbol" => Ok(Self::Symbol), + "fetch" => Ok(Self::Fetch), + "thread" => Ok(Self::Thread), + "rule" => Ok(Self::Rules), + _ => Err(format!("Invalid context picker mode: {}", 
value)), + } + } +} + +impl PromptContextType { + pub fn keyword(&self) -> &'static str { + match self { + Self::File => "file", + Self::Symbol => "symbol", + Self::Fetch => "fetch", + Self::Thread => "thread", + Self::Rules => "rule", + } + } + + pub fn label(&self) -> &'static str { + match self { + Self::File => "Files & Directories", + Self::Symbol => "Symbols", + Self::Fetch => "Fetch", + Self::Thread => "Threads", + Self::Rules => "Rules", + } + } + + pub fn icon(&self) -> IconName { + match self { + Self::File => IconName::File, + Self::Symbol => IconName::Code, + Self::Fetch => IconName::ToolWeb, + Self::Thread => IconName::Thread, + Self::Rules => IconName::Reader, + } + } +} pub(crate) enum Match { File(FileMatch), @@ -47,11 +139,6 @@ pub(crate) enum Match { Entry(EntryMatch), } -pub struct EntryMatch { - mat: Option, - entry: ContextPickerEntry, -} - impl Match { pub fn score(&self) -> f64 { match self { @@ -66,58 +153,95 @@ impl Match { } } -pub struct ContextPickerCompletionProvider { - message_editor: WeakEntity, - workspace: WeakEntity, +pub struct EntryMatch { + mat: Option, + entry: PromptContextEntry, +} + +#[derive(Debug, Clone)] +pub struct RulesContextEntry { + pub prompt_id: UserPromptId, + pub title: SharedString, +} + +#[derive(Debug, Clone)] +pub struct AvailableCommand { + pub name: Arc, + pub description: Arc, + pub requires_argument: bool, +} + +pub trait PromptCompletionProviderDelegate: Send + Sync + 'static { + fn supports_context(&self, mode: PromptContextType, cx: &App) -> bool { + self.supported_modes(cx).contains(&mode) + } + fn supported_modes(&self, cx: &App) -> Vec; + fn supports_images(&self, cx: &App) -> bool; + + fn available_commands(&self, cx: &App) -> Vec; + fn confirm_command(&self, cx: &mut App); +} + +pub struct PromptCompletionProvider { + source: Arc, + editor: WeakEntity, + mention_set: Entity, history_store: Entity, prompt_store: Option>, - prompt_capabilities: Rc>, - available_commands: Rc>>, + workspace: WeakEntity, } -impl ContextPickerCompletionProvider { +impl PromptCompletionProvider { pub fn new( - message_editor: WeakEntity, - workspace: WeakEntity, + source: T, + editor: WeakEntity, + mention_set: Entity, history_store: Entity, prompt_store: Option>, - prompt_capabilities: Rc>, - available_commands: Rc>>, + workspace: WeakEntity, ) -> Self { Self { - message_editor, + source: Arc::new(source), + editor, + mention_set, workspace, history_store, prompt_store, - prompt_capabilities, - available_commands, } } fn completion_for_entry( - entry: ContextPickerEntry, + entry: PromptContextEntry, source_range: Range, - message_editor: WeakEntity, + editor: WeakEntity, + mention_set: WeakEntity, workspace: &Entity, cx: &mut App, ) -> Option { match entry { - ContextPickerEntry::Mode(mode) => Some(Completion { + PromptContextEntry::Mode(mode) => Some(Completion { replace_range: source_range, new_text: format!("@{} ", mode.keyword()), label: CodeLabel::plain(mode.label().to_string(), None), icon_path: Some(mode.icon().path().into()), documentation: None, source: project::CompletionSource::Custom, + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, // This ensures that when a user accepts this completion, the // completion menu will still be shown after "@category " is // inserted confirm: Some(Arc::new(|_, _, _| true)), }), - ContextPickerEntry::Action(action) => { - Self::completion_for_action(action, source_range, message_editor, workspace, cx) - } + PromptContextEntry::Action(action) => 
Self::completion_for_action( + action, + source_range, + editor, + mention_set, + workspace, + cx, + ), } } @@ -125,7 +249,10 @@ impl ContextPickerCompletionProvider { thread_entry: HistoryEntry, source_range: Range, recent: bool, - editor: WeakEntity, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, + workspace: Entity, cx: &mut App, ) -> Completion { let uri = thread_entry.mention_uri(); @@ -146,13 +273,18 @@ impl ContextPickerCompletionProvider { documentation: None, insert_text_mode: None, source: project::CompletionSource::Custom, + match_start: None, + snippet_deduplication_key: None, icon_path: Some(icon_for_completion), confirm: Some(confirm_completion_callback( thread_entry.title().clone(), source_range.start, new_text_len - 1, - editor, uri, + source, + editor, + mention_set, + workspace, )), } } @@ -160,7 +292,10 @@ impl ContextPickerCompletionProvider { fn completion_for_rules( rule: RulesContextEntry, source_range: Range, - editor: WeakEntity, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, + workspace: Entity, cx: &mut App, ) -> Completion { let uri = MentionUri::Rule { @@ -177,13 +312,18 @@ impl ContextPickerCompletionProvider { documentation: None, insert_text_mode: None, source: project::CompletionSource::Custom, + match_start: None, + snippet_deduplication_key: None, icon_path: Some(icon_path), confirm: Some(confirm_completion_callback( rule.title, source_range.start, new_text_len - 1, - editor, uri, + source, + editor, + mention_set, + workspace, )), } } @@ -194,20 +334,25 @@ impl ContextPickerCompletionProvider { is_recent: bool, is_directory: bool, source_range: Range, - message_editor: WeakEntity, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, + workspace: Entity, project: Entity, + label_max_chars: usize, cx: &mut App, ) -> Option { let path_style = project.read(cx).path_style(cx); let (file_name, directory) = - crate::context_picker::file_context_picker::extract_file_name_and_directory( - &project_path.path, - path_prefix, - path_style, - ); + extract_file_name_and_directory(&project_path.path, path_prefix, path_style); - let label = - build_code_label_for_full_path(&file_name, directory.as_ref().map(|s| s.as_ref()), cx); + let label = build_code_label_for_path( + &file_name, + directory.as_ref().map(|s| s.as_ref()), + None, + label_max_chars, + cx, + ); let abs_path = project.read(cx).absolute_path(&project_path, cx)?; @@ -233,13 +378,18 @@ impl ContextPickerCompletionProvider { documentation: None, source: project::CompletionSource::Custom, icon_path: Some(completion_icon_path), + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm: Some(confirm_completion_callback( file_name, source_range.start, new_text_len - 1, - message_editor, uri, + source, + editor, + mention_set, + workspace, )), }) } @@ -247,8 +397,11 @@ impl ContextPickerCompletionProvider { fn completion_for_symbol( symbol: Symbol, source_range: Range, - message_editor: WeakEntity, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, workspace: Entity, + label_max_chars: usize, cx: &mut App, ) -> Option { let project = workspace.read(cx).project().clone(); @@ -267,7 +420,13 @@ impl ContextPickerCompletionProvider { ), }; - let label = build_symbol_label(&symbol.name, &file_name, symbol.range.start.0.row + 1, cx); + let label = build_code_label_for_path( + &symbol.name, + Some(&file_name), + Some(symbol.range.start.0.row + 1), + label_max_chars, + cx, + ); let uri = MentionUri::Symbol { abs_path, @@ -284,13 
+443,18 @@ impl ContextPickerCompletionProvider { documentation: None, source: project::CompletionSource::Custom, icon_path: Some(icon_path), + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm: Some(confirm_completion_callback( symbol.name.into(), source_range.start, new_text_len - 1, - message_editor, uri, + source, + editor, + mention_set, + workspace, )), }) } @@ -298,7 +462,10 @@ impl ContextPickerCompletionProvider { fn completion_for_fetch( source_range: Range, url_to_fetch: SharedString, - message_editor: WeakEntity, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, + workspace: Entity, cx: &mut App, ) -> Option { let new_text = format!("@fetch {} ", url_to_fetch); @@ -316,26 +483,32 @@ impl ContextPickerCompletionProvider { documentation: None, source: project::CompletionSource::Custom, icon_path: Some(icon_path), + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm: Some(confirm_completion_callback( url_to_fetch.to_string().into(), source_range.start, new_text.len() - 1, - message_editor, mention_uri, + source, + editor, + mention_set, + workspace, )), }) } pub(crate) fn completion_for_action( - action: ContextPickerAction, + action: PromptContextAction, source_range: Range, - message_editor: WeakEntity, + editor: WeakEntity, + mention_set: WeakEntity, workspace: &Entity, cx: &mut App, ) -> Option { let (new_text, on_action) = match action { - ContextPickerAction::AddSelections => { + PromptContextAction::AddSelections => { const PLACEHOLDER: &str = "selection "; let selections = selection_ranges(workspace, cx) .into_iter() @@ -354,20 +527,24 @@ impl ContextPickerCompletionProvider { let callback = Arc::new({ let source_range = source_range.clone(); move |_, window: &mut Window, cx: &mut App| { + let editor = editor.clone(); let selections = selections.clone(); - let message_editor = message_editor.clone(); + let mention_set = mention_set.clone(); let source_range = source_range.clone(); window.defer(cx, move |window, cx| { - message_editor - .update(cx, |message_editor, cx| { - message_editor.confirm_mention_for_selection( - source_range, - selections, - window, - cx, - ) - }) - .ok(); + if let Some(editor) = editor.upgrade() { + mention_set + .update(cx, |store, cx| { + store.confirm_mention_for_selection( + source_range, + selections, + editor, + window, + cx, + ) + }) + .ok(); + } }); false } @@ -384,6 +561,8 @@ impl ContextPickerCompletionProvider { icon_path: Some(action.icon().path().into()), documentation: None, source: project::CompletionSource::Custom, + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, // This ensures that when a user accepts this completion, the // completion menu will still be shown after "@category " is @@ -392,12 +571,8 @@ impl ContextPickerCompletionProvider { }) } - fn search_slash_commands( - &self, - query: String, - cx: &mut App, - ) -> Task> { - let commands = self.available_commands.borrow().clone(); + fn search_slash_commands(&self, query: String, cx: &mut App) -> Task> { + let commands = self.source.available_commands(cx); if commands.is_empty() { return Task::ready(Vec::new()); } @@ -429,7 +604,7 @@ impl ContextPickerCompletionProvider { fn search_mentions( &self, - mode: Option, + mode: Option, query: String, cancellation_flag: Arc, cx: &mut App, @@ -438,7 +613,7 @@ impl ContextPickerCompletionProvider { return Task::ready(Vec::default()); }; match mode { - Some(ContextPickerMode::File) => { + Some(PromptContextType::File) => 
{ let search_files_task = search_files(query, cancellation_flag, &workspace, cx); cx.background_spawn(async move { search_files_task @@ -449,7 +624,7 @@ impl ContextPickerCompletionProvider { }) } - Some(ContextPickerMode::Symbol) => { + Some(PromptContextType::Symbol) => { let search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx); cx.background_spawn(async move { search_symbols_task @@ -460,7 +635,7 @@ impl ContextPickerCompletionProvider { }) } - Some(ContextPickerMode::Thread) => { + Some(PromptContextType::Thread) => { let search_threads_task = search_threads(query, cancellation_flag, &self.history_store, cx); cx.background_spawn(async move { @@ -472,7 +647,7 @@ impl ContextPickerCompletionProvider { }) } - Some(ContextPickerMode::Fetch) => { + Some(PromptContextType::Fetch) => { if !query.is_empty() { Task::ready(vec![Match::Fetch(query.into())]) } else { @@ -480,7 +655,7 @@ impl ContextPickerCompletionProvider { } } - Some(ContextPickerMode::Rules) => { + Some(PromptContextType::Rules) => { if let Some(prompt_store) = self.prompt_store.as_ref() { let search_rules_task = search_rules(query, cancellation_flag, prompt_store, cx); @@ -570,9 +745,8 @@ impl ContextPickerCompletionProvider { let mut recent = Vec::with_capacity(6); let mut mentions = self - .message_editor - .read_with(cx, |message_editor, _cx| message_editor.mentions()) - .unwrap_or_default(); + .mention_set + .read_with(cx, |store, _cx| store.mentions()); let workspace = workspace.read(cx); let project = workspace.project().read(cx); let include_root_name = workspace.visible_worktrees(cx).count() > 1; @@ -623,7 +797,7 @@ impl ContextPickerCompletionProvider { }), ); - if self.prompt_capabilities.borrow().embedded_context { + if self.source.supports_context(PromptContextType::Thread, cx) { const RECENT_COUNT: usize = 2; let threads = self .history_store @@ -644,15 +818,14 @@ impl ContextPickerCompletionProvider { &self, workspace: &Entity, cx: &mut App, - ) -> Vec { - let embedded_context = self.prompt_capabilities.borrow().embedded_context; + ) -> Vec { let mut entries = vec![ - ContextPickerEntry::Mode(ContextPickerMode::File), - ContextPickerEntry::Mode(ContextPickerMode::Symbol), + PromptContextEntry::Mode(PromptContextType::File), + PromptContextEntry::Mode(PromptContextType::Symbol), ]; - if embedded_context { - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Thread)); + if self.source.supports_context(PromptContextType::Thread, cx) { + entries.push(PromptContextEntry::Mode(PromptContextType::Thread)); } let has_selection = workspace @@ -665,69 +838,41 @@ impl ContextPickerCompletionProvider { }) }); if has_selection { - entries.push(ContextPickerEntry::Action( - ContextPickerAction::AddSelections, + entries.push(PromptContextEntry::Action( + PromptContextAction::AddSelections, )); } - if embedded_context { - if self.prompt_store.is_some() { - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules)); - } + if self.prompt_store.is_some() && self.source.supports_context(PromptContextType::Rules, cx) + { + entries.push(PromptContextEntry::Mode(PromptContextType::Rules)); + } - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch)); + if self.source.supports_context(PromptContextType::Fetch, cx) { + entries.push(PromptContextEntry::Mode(PromptContextType::Fetch)); } entries } } -fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -> CodeLabel { - let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = 
CodeLabelBuilder::default(); - - label.push_str(symbol_name, None); - label.push_str(" ", None); - label.push_str(&format!("{} L{}", file_name, line), comment_id); - - label.build() -} - -fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { - let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabelBuilder::default(); - - label.push_str(file_name, None); - label.push_str(" ", None); - - if let Some(directory) = directory { - label.push_str(directory, comment_id); - } - - label.build() -} - -impl CompletionProvider for ContextPickerCompletionProvider { +impl CompletionProvider for PromptCompletionProvider { fn completions( &self, _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: Anchor, _trigger: CompletionContext, - _window: &mut Window, + window: &mut Window, cx: &mut Context, ) -> Task>> { - let state = buffer.update(cx, |buffer, _cx| { + let state = buffer.update(cx, |buffer, cx| { let position = buffer_position.to_point(buffer); let line_start = Point::new(position.row, 0); let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); let line = lines.next()?; - ContextCompletion::try_parse( - line, - offset_to_line, - self.prompt_capabilities.borrow().embedded_context, - ) + PromptCompletion::try_parse(line, offset_to_line, &self.source.supported_modes(cx)) }); let Some(state) = state else { return Task::ready(Ok(Vec::new())); @@ -742,10 +887,11 @@ impl CompletionProvider for ContextPickerCompletionProvider { let source_range = snapshot.anchor_before(state.source_range().start) ..snapshot.anchor_after(state.source_range().end); - let editor = self.message_editor.clone(); - + let source = self.source.clone(); + let editor = self.editor.clone(); + let mention_set = self.mention_set.downgrade(); match state { - ContextCompletion::SlashCommand(SlashCommandCompletion { + PromptCompletion::SlashCommand(SlashCommandCompletion { command, argument, .. 
}) => { let search_task = self.search_slash_commands(command.unwrap_or_default(), cx); @@ -760,7 +906,8 @@ impl CompletionProvider for ContextPickerCompletionProvider { format!("/{} ", command.name) }; - let is_missing_argument = argument.is_none() && command.input.is_some(); + let is_missing_argument = + command.requires_argument && argument.is_none(); Completion { replace_range: source_range.clone(), new_text, @@ -770,28 +917,22 @@ impl CompletionProvider for ContextPickerCompletionProvider { )), source: project::CompletionSource::Custom, icon_path: None, + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm: Some(Arc::new({ - let editor = editor.clone(); + let source = source.clone(); move |intent, _window, cx| { if !is_missing_argument { cx.defer({ - let editor = editor.clone(); - move |cx| { - editor - .update(cx, |editor, cx| { - match intent { - CompletionIntent::Complete - | CompletionIntent::CompleteWithInsert - | CompletionIntent::CompleteWithReplace => { - if !is_missing_argument { - editor.send(cx); - } - } - CompletionIntent::Compose => {} - } - }) - .ok(); + let source = source.clone(); + move |cx| match intent { + CompletionIntent::Complete + | CompletionIntent::CompleteWithInsert + | CompletionIntent::CompleteWithReplace => { + source.confirm_command(cx); + } + CompletionIntent::Compose => {} } }); } @@ -813,11 +954,36 @@ impl CompletionProvider for ContextPickerCompletionProvider { }]) }) } - ContextCompletion::Mention(MentionCompletion { mode, argument, .. }) => { + PromptCompletion::Mention(MentionCompletion { mode, argument, .. }) => { let query = argument.unwrap_or_default(); let search_task = self.search_mentions(mode, query, Arc::::default(), cx); + // Calculate maximum characters available for the full label (file_name + space + directory) + // based on maximum menu width after accounting for padding, spacing, and icon width + let label_max_chars = { + // Base06 left padding + Base06 gap + Base06 right padding + icon width + let used_pixels = DynamicSpacing::Base06.px(cx) * 3.0 + + IconSize::XSmall.rems() * window.rem_size(); + + let style = window.text_style(); + let font_id = window.text_system().resolve_font(&style.font()); + let font_size = TextSize::Small.rems(cx).to_pixels(window.rem_size()); + + // Fallback em_width of 10px matches file_finder.rs fallback for TextSize::Small + let em_width = cx + .text_system() + .em_width(font_id, font_size) + .unwrap_or(px(10.0)); + + // Calculate available pixels for text (file_name + directory) + // Using max width since dynamic_width allows the menu to expand up to this + let available_pixels = COMPLETION_MENU_MAX_WIDTH - used_pixels; + + // Convert to character count (total available for file_name + directory) + (f32::from(available_pixels) / f32::from(em_width)) as usize + }; + cx.spawn(async move |_, cx| { let matches = search_task.await; @@ -849,8 +1015,12 @@ impl CompletionProvider for ContextPickerCompletionProvider { is_recent, mat.is_dir, source_range.clone(), + source.clone(), editor.clone(), + mention_set.clone(), + workspace.clone(), project.clone(), + label_max_chars, cx, ) } @@ -859,8 +1029,11 @@ impl CompletionProvider for ContextPickerCompletionProvider { Self::completion_for_symbol( symbol, source_range.clone(), + source.clone(), editor.clone(), + mention_set.clone(), workspace.clone(), + label_max_chars, cx, ) } @@ -869,7 +1042,10 @@ impl CompletionProvider for ContextPickerCompletionProvider { thread, source_range.clone(), false, + source.clone(), editor.clone(), + 
mention_set.clone(), + workspace.clone(), cx, )), @@ -877,21 +1053,30 @@ impl CompletionProvider for ContextPickerCompletionProvider { thread, source_range.clone(), true, + source.clone(), editor.clone(), + mention_set.clone(), + workspace.clone(), cx, )), Match::Rules(user_rules) => Some(Self::completion_for_rules( user_rules, source_range.clone(), + source.clone(), editor.clone(), + mention_set.clone(), + workspace.clone(), cx, )), Match::Fetch(url) => Self::completion_for_fetch( source_range.clone(), url, + source.clone(), editor.clone(), + mention_set.clone(), + workspace.clone(), cx, ), @@ -900,6 +1085,7 @@ impl CompletionProvider for ContextPickerCompletionProvider { entry, source_range.clone(), editor.clone(), + mention_set.clone(), &workspace, cx, ) @@ -928,7 +1114,6 @@ impl CompletionProvider for ContextPickerCompletionProvider { position: language::Anchor, _text: &str, _trigger_in_words: bool, - _menu_is_open: bool, cx: &mut Context, ) -> bool { let buffer = buffer.read(cx); @@ -937,27 +1122,24 @@ impl CompletionProvider for ContextPickerCompletionProvider { let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); if let Some(line) = lines.next() { - ContextCompletion::try_parse( - line, - offset_to_line, - self.prompt_capabilities.borrow().embedded_context, - ) - .filter(|completion| { - // Right now we don't support completing arguments of slash commands - let is_slash_command_with_argument = matches!( - completion, - ContextCompletion::SlashCommand(SlashCommandCompletion { - argument: Some(_), - .. - }) - ); - !is_slash_command_with_argument - }) - .map(|completion| { - completion.source_range().start <= offset_to_line + position.column as usize - && completion.source_range().end >= offset_to_line + position.column as usize - }) - .unwrap_or(false) + PromptCompletion::try_parse(line, offset_to_line, &self.source.supported_modes(cx)) + .filter(|completion| { + // Right now we don't support completing arguments of slash commands + let is_slash_command_with_argument = matches!( + completion, + PromptCompletion::SlashCommand(SlashCommandCompletion { + argument: Some(_), + .. 
+ }) + ); + !is_slash_command_with_argument + }) + .map(|completion| { + completion.source_range().start <= offset_to_line + position.column as usize + && completion.source_range().end + >= offset_to_line + position.column as usize + }) + .unwrap_or(false) } else { false } @@ -972,44 +1154,56 @@ impl CompletionProvider for ContextPickerCompletionProvider { } } -fn confirm_completion_callback( +fn confirm_completion_callback( crease_text: SharedString, start: Anchor, content_len: usize, - message_editor: WeakEntity, mention_uri: MentionUri, + source: Arc, + editor: WeakEntity, + mention_set: WeakEntity, + workspace: Entity, ) -> Arc bool + Send + Sync> { Arc::new(move |_, window, cx| { - let message_editor = message_editor.clone(); + let source = source.clone(); + let editor = editor.clone(); + let mention_set = mention_set.clone(); let crease_text = crease_text.clone(); let mention_uri = mention_uri.clone(); + let workspace = workspace.clone(); window.defer(cx, move |window, cx| { - message_editor - .clone() - .update(cx, |message_editor, cx| { - message_editor - .confirm_mention_completion( - crease_text, - start, - content_len, - mention_uri, - window, - cx, - ) - .detach(); - }) - .ok(); + if let Some(editor) = editor.upgrade() { + mention_set + .clone() + .update(cx, |mention_set, cx| { + mention_set + .confirm_mention_completion( + crease_text, + start, + content_len, + mention_uri, + source.supports_images(cx), + editor, + &workspace, + window, + cx, + ) + .detach(); + }) + .ok(); + } }); false }) } -enum ContextCompletion { +#[derive(Debug, PartialEq)] +enum PromptCompletion { SlashCommand(SlashCommandCompletion), Mention(MentionCompletion), } -impl ContextCompletion { +impl PromptCompletion { fn source_range(&self) -> Range { match self { Self::SlashCommand(completion) => completion.source_range.clone(), @@ -1017,16 +1211,19 @@ impl ContextCompletion { } } - fn try_parse(line: &str, offset_to_line: usize, allow_non_file_mentions: bool) -> Option { - if let Some(command) = SlashCommandCompletion::try_parse(line, offset_to_line) { - Some(Self::SlashCommand(command)) - } else if let Some(mention) = - MentionCompletion::try_parse(allow_non_file_mentions, line, offset_to_line) - { - Some(Self::Mention(mention)) - } else { - None + fn try_parse( + line: &str, + offset_to_line: usize, + supported_modes: &[PromptContextType], + ) -> Option { + if line.contains('@') { + if let Some(mention) = + MentionCompletion::try_parse(line, offset_to_line, supported_modes) + { + return Some(Self::Mention(mention)); + } } + SlashCommandCompletion::try_parse(line, offset_to_line).map(Self::SlashCommand) } } @@ -1078,12 +1275,16 @@ impl SlashCommandCompletion { #[derive(Debug, Default, PartialEq)] struct MentionCompletion { source_range: Range, - mode: Option, + mode: Option, argument: Option, } impl MentionCompletion { - fn try_parse(allow_non_file_mentions: bool, line: &str, offset_to_line: usize) -> Option { + fn try_parse( + line: &str, + offset_to_line: usize, + supported_modes: &[PromptContextType], + ) -> Option { let last_mention_start = line.rfind('@')?; // No whitespace immediately after '@' @@ -1117,8 +1318,8 @@ impl MentionCompletion { // Safe since we check no leading whitespace above end += mode_text.len(); - if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() - && (allow_non_file_mentions || matches!(parsed_mode, ContextPickerMode::File)) + if let Some(parsed_mode) = PromptContextType::try_from(mode_text).ok() + && supported_modes.contains(&parsed_mode) { mode = 
Some(parsed_mode); } else { @@ -1152,10 +1353,379 @@ impl MentionCompletion { } } +pub(crate) fn search_files( + query: String, + cancellation_flag: Arc, + workspace: &Entity, + cx: &App, +) -> Task> { + if query.is_empty() { + let workspace = workspace.read(cx); + let project = workspace.project().read(cx); + let visible_worktrees = workspace.visible_worktrees(cx).collect::>(); + let include_root_name = visible_worktrees.len() > 1; + + let recent_matches = workspace + .recent_navigation_history(Some(10), cx) + .into_iter() + .map(|(project_path, _)| { + let path_prefix = if include_root_name { + project + .worktree_for_id(project_path.worktree_id, cx) + .map(|wt| wt.read(cx).root_name().into()) + .unwrap_or_else(|| RelPath::empty().into()) + } else { + RelPath::empty().into() + }; + + FileMatch { + mat: PathMatch { + score: 0., + positions: Vec::new(), + worktree_id: project_path.worktree_id.to_usize(), + path: project_path.path, + path_prefix, + distance_to_relative_ancestor: 0, + is_dir: false, + }, + is_recent: true, + } + }); + + let file_matches = visible_worktrees.into_iter().flat_map(|worktree| { + let worktree = worktree.read(cx); + let path_prefix: Arc = if include_root_name { + worktree.root_name().into() + } else { + RelPath::empty().into() + }; + worktree.entries(false, 0).map(move |entry| FileMatch { + mat: PathMatch { + score: 0., + positions: Vec::new(), + worktree_id: worktree.id().to_usize(), + path: entry.path.clone(), + path_prefix: path_prefix.clone(), + distance_to_relative_ancestor: 0, + is_dir: entry.is_dir(), + }, + is_recent: false, + }) + }); + + Task::ready(recent_matches.chain(file_matches).collect()) + } else { + let worktrees = workspace.read(cx).visible_worktrees(cx).collect::>(); + let include_root_name = worktrees.len() > 1; + let candidate_sets = worktrees + .into_iter() + .map(|worktree| { + let worktree = worktree.read(cx); + + PathMatchCandidateSet { + snapshot: worktree.snapshot(), + include_ignored: worktree.root_entry().is_some_and(|entry| entry.is_ignored), + include_root_name, + candidates: project::Candidates::Entries, + } + }) + .collect::>(); + + let executor = cx.background_executor().clone(); + cx.foreground_executor().spawn(async move { + fuzzy::match_path_sets( + candidate_sets.as_slice(), + query.as_str(), + &None, + false, + 100, + &cancellation_flag, + executor, + ) + .await + .into_iter() + .map(|mat| FileMatch { + mat, + is_recent: false, + }) + .collect::>() + }) + } +} + +pub(crate) fn search_symbols( + query: String, + cancellation_flag: Arc, + workspace: &Entity, + cx: &mut App, +) -> Task> { + let symbols_task = workspace.update(cx, |workspace, cx| { + workspace + .project() + .update(cx, |project, cx| project.symbols(&query, cx)) + }); + let project = workspace.read(cx).project().clone(); + cx.spawn(async move |cx| { + let Some(symbols) = symbols_task.await.log_err() else { + return Vec::new(); + }; + let Some((visible_match_candidates, external_match_candidates)): Option<(Vec<_>, Vec<_>)> = + project + .update(cx, |project, cx| { + symbols + .iter() + .enumerate() + .map(|(id, symbol)| { + StringMatchCandidate::new(id, symbol.label.filter_text()) + }) + .partition(|candidate| match &symbols[candidate.id].path { + SymbolLocation::InProject(project_path) => project + .entry_for_path(project_path, cx) + .is_some_and(|e| !e.is_ignored), + SymbolLocation::OutsideProject { .. 
} => false, + }) + }) + .log_err() + else { + return Vec::new(); + }; + + const MAX_MATCHES: usize = 100; + let mut visible_matches = cx.background_executor().block(fuzzy::match_strings( + &visible_match_candidates, + &query, + false, + true, + MAX_MATCHES, + &cancellation_flag, + cx.background_executor().clone(), + )); + let mut external_matches = cx.background_executor().block(fuzzy::match_strings( + &external_match_candidates, + &query, + false, + true, + MAX_MATCHES - visible_matches.len().min(MAX_MATCHES), + &cancellation_flag, + cx.background_executor().clone(), + )); + let sort_key_for_match = |mat: &StringMatch| { + let symbol = &symbols[mat.candidate_id]; + (Reverse(OrderedFloat(mat.score)), symbol.label.filter_text()) + }; + + visible_matches.sort_unstable_by_key(sort_key_for_match); + external_matches.sort_unstable_by_key(sort_key_for_match); + let mut matches = visible_matches; + matches.append(&mut external_matches); + + matches + .into_iter() + .map(|mut mat| { + let symbol = symbols[mat.candidate_id].clone(); + let filter_start = symbol.label.filter_range.start; + for position in &mut mat.positions { + *position += filter_start; + } + SymbolMatch { symbol } + }) + .collect() + }) +} + +pub(crate) fn search_threads( + query: String, + cancellation_flag: Arc, + thread_store: &Entity, + cx: &mut App, +) -> Task> { + let threads = thread_store.read(cx).entries().collect(); + if query.is_empty() { + return Task::ready(threads); + } + + let executor = cx.background_executor().clone(); + cx.background_spawn(async move { + let candidates = threads + .iter() + .enumerate() + .map(|(id, thread)| StringMatchCandidate::new(id, thread.title())) + .collect::>(); + let matches = fuzzy::match_strings( + &candidates, + &query, + false, + true, + 100, + &cancellation_flag, + executor, + ) + .await; + + matches + .into_iter() + .map(|mat| threads[mat.candidate_id].clone()) + .collect() + }) +} + +pub(crate) fn search_rules( + query: String, + cancellation_flag: Arc, + prompt_store: &Entity, + cx: &mut App, +) -> Task> { + let search_task = prompt_store.read(cx).search(query, cancellation_flag, cx); + cx.background_spawn(async move { + search_task + .await + .into_iter() + .flat_map(|metadata| { + // Default prompts are filtered out as they are automatically included. 
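+                // Entries missing a user prompt id or a title are also dropped: the `?`
+                // operators below short-circuit this Option-returning closure to None.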
+ if metadata.default { + None + } else { + Some(RulesContextEntry { + prompt_id: metadata.id.user_id()?, + title: metadata.title?, + }) + } + }) + .collect::>() + }) +} + +pub struct SymbolMatch { + pub symbol: Symbol, +} + +pub struct FileMatch { + pub mat: PathMatch, + pub is_recent: bool, +} + +pub fn extract_file_name_and_directory( + path: &RelPath, + path_prefix: &RelPath, + path_style: PathStyle, +) -> (SharedString, Option) { + // If path is empty, this means we're matching with the root directory itself + // so we use the path_prefix as the name + if path.is_empty() && !path_prefix.is_empty() { + return (path_prefix.display(path_style).to_string().into(), None); + } + + let full_path = path_prefix.join(path); + let file_name = full_path.file_name().unwrap_or_default(); + let display_path = full_path.display(path_style); + let (directory, file_name) = display_path.split_at(display_path.len() - file_name.len()); + ( + file_name.to_string().into(), + Some(SharedString::new(directory)).filter(|dir| !dir.is_empty()), + ) +} + +fn build_code_label_for_path( + file: &str, + directory: Option<&str>, + line_number: Option, + label_max_chars: usize, + cx: &App, +) -> CodeLabel { + let variable_highlight_id = cx + .theme() + .syntax() + .highlight_id("variable") + .map(HighlightId); + let mut label = CodeLabelBuilder::default(); + + label.push_str(file, None); + label.push_str(" ", None); + + if let Some(directory) = directory { + let file_name_chars = file.chars().count(); + // Account for: file_name + space (ellipsis is handled by truncate_and_remove_front) + let directory_max_chars = label_max_chars + .saturating_sub(file_name_chars) + .saturating_sub(1); + let truncated_directory = truncate_and_remove_front(directory, directory_max_chars.max(5)); + label.push_str(&truncated_directory, variable_highlight_id); + } + if let Some(line_number) = line_number { + label.push_str(&format!(" L{}", line_number), variable_highlight_id); + } + label.build() +} + +fn selection_ranges( + workspace: &Entity, + cx: &mut App, +) -> Vec<(Entity, Range)> { + let Some(editor) = workspace + .read(cx) + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + else { + return Vec::new(); + }; + + editor.update(cx, |editor, cx| { + let selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); + + let buffer = editor.buffer().clone().read(cx); + let snapshot = buffer.snapshot(cx); + + selections + .into_iter() + .map(|s| snapshot.anchor_after(s.start)..snapshot.anchor_before(s.end)) + .flat_map(|range| { + let (start_buffer, start) = buffer.text_anchor_for_position(range.start, cx)?; + let (end_buffer, end) = buffer.text_anchor_for_position(range.end, cx)?; + if start_buffer != end_buffer { + return None; + } + Some((start_buffer, start..end)) + }) + .collect::>() + }) +} + #[cfg(test)] mod tests { use super::*; + #[test] + fn test_prompt_completion_parse() { + let supported_modes = vec![PromptContextType::File, PromptContextType::Symbol]; + + assert_eq!( + PromptCompletion::try_parse("/", 0, &supported_modes), + Some(PromptCompletion::SlashCommand(SlashCommandCompletion { + source_range: 0..1, + command: None, + argument: None, + })) + ); + + assert_eq!( + PromptCompletion::try_parse("@", 0, &supported_modes), + Some(PromptCompletion::Mention(MentionCompletion { + source_range: 0..1, + mode: None, + argument: None, + })) + ); + + assert_eq!( + PromptCompletion::try_parse("/test @file", 0, &supported_modes), + Some(PromptCompletion::Mention(MentionCompletion { + source_range: 6..11, + mode: 
Some(PromptContextType::File), + argument: None, + })) + ); + } + #[test] fn test_slash_command_completion_parse() { assert_eq!( @@ -1225,10 +1795,15 @@ mod tests { #[test] fn test_mention_completion_parse() { - assert_eq!(MentionCompletion::try_parse(true, "Lorem Ipsum", 0), None); + let supported_modes = vec![PromptContextType::File, PromptContextType::Symbol]; + + assert_eq!( + MentionCompletion::try_parse("Lorem Ipsum", 0, &supported_modes), + None + ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @", 0), + MentionCompletion::try_parse("Lorem @", 0, &supported_modes), Some(MentionCompletion { source_range: 6..7, mode: None, @@ -1237,52 +1812,52 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @file", 0), + MentionCompletion::try_parse("Lorem @file", 0, &supported_modes), Some(MentionCompletion { source_range: 6..11, - mode: Some(ContextPickerMode::File), + mode: Some(PromptContextType::File), argument: None, }) ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @file ", 0), + MentionCompletion::try_parse("Lorem @file ", 0, &supported_modes), Some(MentionCompletion { source_range: 6..12, - mode: Some(ContextPickerMode::File), + mode: Some(PromptContextType::File), argument: None, }) ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @file main.rs", 0), + MentionCompletion::try_parse("Lorem @file main.rs", 0, &supported_modes), Some(MentionCompletion { source_range: 6..19, - mode: Some(ContextPickerMode::File), + mode: Some(PromptContextType::File), argument: Some("main.rs".to_string()), }) ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @file main.rs ", 0), + MentionCompletion::try_parse("Lorem @file main.rs ", 0, &supported_modes), Some(MentionCompletion { source_range: 6..19, - mode: Some(ContextPickerMode::File), + mode: Some(PromptContextType::File), argument: Some("main.rs".to_string()), }) ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @file main.rs Ipsum", 0), + MentionCompletion::try_parse("Lorem @file main.rs Ipsum", 0, &supported_modes), Some(MentionCompletion { source_range: 6..19, - mode: Some(ContextPickerMode::File), + mode: Some(PromptContextType::File), argument: Some("main.rs".to_string()), }) ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @main", 0), + MentionCompletion::try_parse("Lorem @main", 0, &supported_modes), Some(MentionCompletion { source_range: 6..11, mode: None, @@ -1291,7 +1866,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @main ", 0), + MentionCompletion::try_parse("Lorem @main ", 0, &supported_modes), Some(MentionCompletion { source_range: 6..12, mode: None, @@ -1299,41 +1874,47 @@ mod tests { }) ); - assert_eq!(MentionCompletion::try_parse(true, "Lorem @main m", 0), None); + assert_eq!( + MentionCompletion::try_parse("Lorem @main m", 0, &supported_modes), + None + ); - assert_eq!(MentionCompletion::try_parse(true, "test@", 0), None); + assert_eq!( + MentionCompletion::try_parse("test@", 0, &supported_modes), + None + ); // Allowed non-file mentions assert_eq!( - MentionCompletion::try_parse(true, "Lorem @symbol main", 0), + MentionCompletion::try_parse("Lorem @symbol main", 0, &supported_modes), Some(MentionCompletion { source_range: 6..18, - mode: Some(ContextPickerMode::Symbol), + mode: Some(PromptContextType::Symbol), argument: Some("main".to_string()), }) ); // Disallowed non-file mentions assert_eq!( - MentionCompletion::try_parse(false, "Lorem @symbol main", 0), + MentionCompletion::try_parse("Lorem @symbol main", 0, 
&[PromptContextType::File]), None ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem@symbol", 0), + MentionCompletion::try_parse("Lorem@symbol", 0, &supported_modes), None, "Should not parse mention inside word" ); assert_eq!( - MentionCompletion::try_parse(true, "Lorem @ file", 0), + MentionCompletion::try_parse("Lorem @ file", 0, &supported_modes), None, "Should not parse with a space after @" ); assert_eq!( - MentionCompletion::try_parse(true, "@ file", 0), + MentionCompletion::try_parse("@ file", 0, &supported_modes), None, "Should not parse with a space after @ at the start of the line" ); diff --git a/crates/agent_ui/src/context.rs b/crates/agent_ui/src/context.rs index 0bbf4d45ee56bf8220987f52fd7a1f6aa0a73055..ad8c95ba3e61f8f51d2b70ce59d0c8a9451e6571 100644 --- a/crates/agent_ui/src/context.rs +++ b/crates/agent_ui/src/context.rs @@ -1,764 +1,10 @@ -use agent::outline; -use assistant_text_thread::TextThread; -use futures::future; -use futures::{FutureExt, future::Shared}; -use gpui::{App, AppContext as _, ElementId, Entity, SharedString, Task}; -use language::Buffer; +use crate::mention_set::Mention; +use gpui::{AppContext as _, Entity, Task}; use language_model::{LanguageModelImage, LanguageModelRequestMessage, MessageContent}; -use project::{Project, ProjectEntryId, ProjectPath, Worktree}; -use prompt_store::{PromptStore, UserPromptId}; -use ref_cast::RefCast; -use rope::Point; -use std::fmt::{self, Display, Formatter, Write as _}; -use std::hash::{Hash, Hasher}; -use std::path::PathBuf; -use std::{ops::Range, path::Path, sync::Arc}; -use text::{Anchor, OffsetRangeExt as _}; -use ui::IconName; -use util::markdown::MarkdownCodeBlock; -use util::rel_path::RelPath; -use util::{ResultExt as _, post_inc}; +use ui::App; +use util::ResultExt as _; -pub const RULES_ICON: IconName = IconName::Reader; - -pub enum ContextKind { - File, - Directory, - Symbol, - Selection, - FetchedUrl, - Thread, - TextThread, - Rules, - Image, -} - -impl ContextKind { - pub fn icon(&self) -> IconName { - match self { - ContextKind::File => IconName::File, - ContextKind::Directory => IconName::Folder, - ContextKind::Symbol => IconName::Code, - ContextKind::Selection => IconName::Reader, - ContextKind::FetchedUrl => IconName::ToolWeb, - ContextKind::Thread => IconName::Thread, - ContextKind::TextThread => IconName::TextThread, - ContextKind::Rules => RULES_ICON, - ContextKind::Image => IconName::Image, - } - } -} - -/// Handle for context that can be attached to a user message. -/// -/// This uses IDs that are stable enough for tracking renames and identifying when context has -/// already been added to the thread. To use this in a set, wrap it in `AgentContextKey` to opt in -/// to `PartialEq` and `Hash` impls that use the subset of the fields used for this stable identity. 
-#[derive(Debug, Clone)] -pub enum AgentContextHandle { - File(FileContextHandle), - Directory(DirectoryContextHandle), - Symbol(SymbolContextHandle), - Selection(SelectionContextHandle), - FetchedUrl(FetchedUrlContext), - Thread(ThreadContextHandle), - TextThread(TextThreadContextHandle), - Rules(RulesContextHandle), - Image(ImageContext), -} - -impl AgentContextHandle { - pub fn id(&self) -> ContextId { - match self { - Self::File(context) => context.context_id, - Self::Directory(context) => context.context_id, - Self::Symbol(context) => context.context_id, - Self::Selection(context) => context.context_id, - Self::FetchedUrl(context) => context.context_id, - Self::Thread(context) => context.context_id, - Self::TextThread(context) => context.context_id, - Self::Rules(context) => context.context_id, - Self::Image(context) => context.context_id, - } - } - - pub fn element_id(&self, name: SharedString) -> ElementId { - ElementId::NamedInteger(name, self.id().0) - } -} - -/// Loaded context that can be attached to a user message. This can be thought of as a -/// snapshot of the context along with an `AgentContextHandle`. -#[derive(Debug, Clone)] -pub enum AgentContext { - File(FileContext), - Directory(DirectoryContext), - Symbol(SymbolContext), - Selection(SelectionContext), - FetchedUrl(FetchedUrlContext), - Thread(ThreadContext), - TextThread(TextThreadContext), - Rules(RulesContext), - Image(ImageContext), -} - -impl AgentContext { - pub fn handle(&self) -> AgentContextHandle { - match self { - AgentContext::File(context) => AgentContextHandle::File(context.handle.clone()), - AgentContext::Directory(context) => { - AgentContextHandle::Directory(context.handle.clone()) - } - AgentContext::Symbol(context) => AgentContextHandle::Symbol(context.handle.clone()), - AgentContext::Selection(context) => { - AgentContextHandle::Selection(context.handle.clone()) - } - AgentContext::FetchedUrl(context) => AgentContextHandle::FetchedUrl(context.clone()), - AgentContext::Thread(context) => AgentContextHandle::Thread(context.handle.clone()), - AgentContext::TextThread(context) => { - AgentContextHandle::TextThread(context.handle.clone()) - } - AgentContext::Rules(context) => AgentContextHandle::Rules(context.handle.clone()), - AgentContext::Image(context) => AgentContextHandle::Image(context.clone()), - } - } -} - -/// ID created at time of context add, for use in ElementId. This is not the stable identity of a -/// context, instead that's handled by the `PartialEq` and `Hash` impls of `AgentContextKey`. -#[derive(Debug, Copy, Clone)] -pub struct ContextId(u64); - -impl ContextId { - pub fn zero() -> Self { - ContextId(0) - } - - fn for_lookup() -> Self { - ContextId(u64::MAX) - } - - pub fn post_inc(&mut self) -> Self { - Self(post_inc(&mut self.0)) - } -} - -/// File context provides the entire contents of a file. -/// -/// This holds an `Entity` so that file path renames affect its display and so that it can -/// be opened even if the file has been deleted. An alternative might be to use `ProjectEntryId`, -/// but then when deleted there is no path info or ability to open. 
-#[derive(Debug, Clone)] -pub struct FileContextHandle { - pub buffer: Entity, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct FileContext { - pub handle: FileContextHandle, - pub full_path: String, - pub text: SharedString, - pub is_outline: bool, -} - -impl FileContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.buffer == other.buffer - } - - pub fn hash_for_key(&self, state: &mut H) { - self.buffer.hash(state) - } - - pub fn project_path(&self, cx: &App) -> Option { - let file = self.buffer.read(cx).file()?; - Some(ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path().clone(), - }) - } - - fn load(self, cx: &App) -> Task> { - let buffer_ref = self.buffer.read(cx); - let Some(file) = buffer_ref.file() else { - log::error!("file context missing path"); - return Task::ready(None); - }; - let full_path = file.full_path(cx).to_string_lossy().into_owned(); - let rope = buffer_ref.as_rope().clone(); - let buffer = self.buffer.clone(); - - cx.spawn(async move |cx| { - let buffer_content = - outline::get_buffer_content_or_outline(buffer.clone(), Some(&full_path), &cx) - .await - .unwrap_or_else(|_| outline::BufferContent { - text: rope.to_string(), - is_outline: false, - }); - - let context = AgentContext::File(FileContext { - handle: self, - full_path, - text: buffer_content.text.into(), - is_outline: buffer_content.is_outline, - }); - Some(context) - }) - } -} - -impl Display for FileContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - MarkdownCodeBlock { - tag: &codeblock_tag(&self.full_path, None), - text: &self.text, - } - ) - } -} - -/// Directory contents provides the entire contents of text files in a directory. -/// -/// This has a `ProjectEntryId` so that it follows renames. -#[derive(Debug, Clone)] -pub struct DirectoryContextHandle { - pub entry_id: ProjectEntryId, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct DirectoryContext { - pub handle: DirectoryContextHandle, - pub full_path: String, - pub descendants: Vec, -} - -#[derive(Debug, Clone)] -pub struct DirectoryContextDescendant { - /// Path within the directory. 
- pub rel_path: Arc, - pub fenced_codeblock: SharedString, -} - -impl DirectoryContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.entry_id == other.entry_id - } - - pub fn hash_for_key(&self, state: &mut H) { - self.entry_id.hash(state) - } - - fn load(self, project: Entity, cx: &mut App) -> Task> { - let Some(worktree) = project.read(cx).worktree_for_entry(self.entry_id, cx) else { - return Task::ready(None); - }; - let worktree_ref = worktree.read(cx); - let Some(entry) = worktree_ref.entry_for_id(self.entry_id) else { - return Task::ready(None); - }; - if entry.is_file() { - log::error!("DirectoryContext unexpectedly refers to a file."); - return Task::ready(None); - } - - let directory_path = entry.path.clone(); - let directory_full_path = worktree_ref - .full_path(&directory_path) - .to_string_lossy() - .to_string(); - - let file_paths = collect_files_in_path(worktree_ref, &directory_path); - let descendants_future = future::join_all(file_paths.into_iter().map(|path| { - let worktree_ref = worktree.read(cx); - let worktree_id = worktree_ref.id(); - let full_path = worktree_ref.full_path(&path).to_string_lossy().into_owned(); - - let rel_path = path - .strip_prefix(&directory_path) - .log_err() - .map_or_else(|| path.clone(), |rel_path| rel_path.into()); - - let open_task = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - let project_path = ProjectPath { worktree_id, path }; - buffer_store.open_buffer(project_path, cx) - }) - }); - - // TODO: report load errors instead of just logging - let rope_task = cx.spawn(async move |cx| { - let buffer = open_task.await.log_err()?; - let rope = buffer - .read_with(cx, |buffer, _cx| buffer.as_rope().clone()) - .log_err()?; - Some((rope, buffer)) - }); - - cx.background_spawn(async move { - let (rope, _buffer) = rope_task.await?; - let fenced_codeblock = MarkdownCodeBlock { - tag: &codeblock_tag(&full_path, None), - text: &rope.to_string(), - } - .to_string() - .into(); - let descendant = DirectoryContextDescendant { - rel_path, - fenced_codeblock, - }; - Some(descendant) - }) - })); - - cx.background_spawn(async move { - let descendants = descendants_future - .await - .into_iter() - .flatten() - .collect::>(); - let context = AgentContext::Directory(DirectoryContext { - handle: self, - full_path: directory_full_path, - descendants, - }); - Some(context) - }) - } -} - -impl Display for DirectoryContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut is_first = true; - for descendant in &self.descendants { - if !is_first { - writeln!(f)?; - } else { - is_first = false; - } - write!(f, "{}", descendant.fenced_codeblock)?; - } - Ok(()) - } -} - -#[derive(Debug, Clone)] -pub struct SymbolContextHandle { - pub buffer: Entity, - pub symbol: SharedString, - pub range: Range, - /// The range that fully contains the symbol. e.g. for function symbol, this will include not - /// only the signature, but also the body. Not used by `PartialEq` or `Hash` for - /// `AgentContextKey`. 
- pub enclosing_range: Range, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct SymbolContext { - pub handle: SymbolContextHandle, - pub full_path: String, - pub line_range: Range, - pub text: SharedString, -} - -impl SymbolContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.buffer == other.buffer && self.symbol == other.symbol && self.range == other.range - } - - pub fn hash_for_key(&self, state: &mut H) { - self.buffer.hash(state); - self.symbol.hash(state); - self.range.hash(state); - } - - pub fn full_path(&self, cx: &App) -> Option { - Some(self.buffer.read(cx).file()?.full_path(cx)) - } - - pub fn enclosing_line_range(&self, cx: &App) -> Range { - self.enclosing_range - .to_point(&self.buffer.read(cx).snapshot()) - } - - pub fn text(&self, cx: &App) -> SharedString { - self.buffer - .read(cx) - .text_for_range(self.enclosing_range.clone()) - .collect::() - .into() - } - - fn load(self, cx: &App) -> Task> { - let buffer_ref = self.buffer.read(cx); - let Some(file) = buffer_ref.file() else { - log::error!("symbol context's file has no path"); - return Task::ready(None); - }; - let full_path = file.full_path(cx).to_string_lossy().into_owned(); - let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot()); - let text = self.text(cx); - let context = AgentContext::Symbol(SymbolContext { - handle: self, - full_path, - line_range, - text, - }); - Task::ready(Some(context)) - } -} - -impl Display for SymbolContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let code_block = MarkdownCodeBlock { - tag: &codeblock_tag(&self.full_path, Some(self.line_range.clone())), - text: &self.text, - }; - write!(f, "{code_block}",) - } -} - -#[derive(Debug, Clone)] -pub struct SelectionContextHandle { - pub buffer: Entity, - pub range: Range, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct SelectionContext { - pub handle: SelectionContextHandle, - pub full_path: String, - pub line_range: Range, - pub text: SharedString, -} - -impl SelectionContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.buffer == other.buffer && self.range == other.range - } - - pub fn hash_for_key(&self, state: &mut H) { - self.buffer.hash(state); - self.range.hash(state); - } - - pub fn full_path(&self, cx: &App) -> Option { - Some(self.buffer.read(cx).file()?.full_path(cx)) - } - - pub fn line_range(&self, cx: &App) -> Range { - self.range.to_point(&self.buffer.read(cx).snapshot()) - } - - pub fn text(&self, cx: &App) -> SharedString { - self.buffer - .read(cx) - .text_for_range(self.range.clone()) - .collect::() - .into() - } - - fn load(self, cx: &App) -> Task> { - let Some(full_path) = self.full_path(cx) else { - log::error!("selection context's file has no path"); - return Task::ready(None); - }; - let text = self.text(cx); - let context = AgentContext::Selection(SelectionContext { - full_path: full_path.to_string_lossy().into_owned(), - line_range: self.line_range(cx), - text, - handle: self, - }); - - Task::ready(Some(context)) - } -} - -impl Display for SelectionContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let code_block = MarkdownCodeBlock { - tag: &codeblock_tag(&self.full_path, Some(self.line_range.clone())), - text: &self.text, - }; - write!(f, "{code_block}",) - } -} - -#[derive(Debug, Clone)] -pub struct FetchedUrlContext { - pub url: SharedString, - /// Text contents of the fetched url. 
Unlike other context types, the contents of this gets - /// populated when added rather than when sending the message. Not used by `PartialEq` or `Hash` - /// for `AgentContextKey`. - pub text: SharedString, - pub context_id: ContextId, -} - -impl FetchedUrlContext { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.url == other.url - } - - pub fn hash_for_key(&self, state: &mut H) { - self.url.hash(state); - } - - pub fn lookup_key(url: SharedString) -> AgentContextKey { - AgentContextKey(AgentContextHandle::FetchedUrl(FetchedUrlContext { - url, - text: "".into(), - context_id: ContextId::for_lookup(), - })) - } - - pub fn load(self) -> Task> { - Task::ready(Some(AgentContext::FetchedUrl(self))) - } -} - -impl Display for FetchedUrlContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - // TODO: Better format - url and contents are not delimited. - write!(f, "{}\n{}\n", self.url, self.text) - } -} - -#[derive(Debug, Clone)] -pub struct ThreadContextHandle { - pub thread: Entity, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct ThreadContext { - pub handle: ThreadContextHandle, - pub title: SharedString, - pub text: SharedString, -} - -impl ThreadContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.thread == other.thread - } - - pub fn hash_for_key(&self, state: &mut H) { - self.thread.hash(state) - } - - pub fn title(&self, cx: &App) -> SharedString { - self.thread.read(cx).title() - } - - fn load(self, cx: &mut App) -> Task> { - let task = self.thread.update(cx, |thread, cx| thread.summary(cx)); - let title = self.title(cx); - cx.background_spawn(async move { - let text = task.await?; - let context = AgentContext::Thread(ThreadContext { - title, - text, - handle: self, - }); - Some(context) - }) - } -} - -impl Display for ThreadContext { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - // TODO: Better format for this - doesn't distinguish title and contents. 
- write!(f, "{}\n{}\n", &self.title, &self.text.trim()) - } -} - -#[derive(Debug, Clone)] -pub struct TextThreadContextHandle { - pub text_thread: Entity, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct TextThreadContext { - pub handle: TextThreadContextHandle, - pub title: SharedString, - pub text: SharedString, -} - -impl TextThreadContextHandle { - // pub fn lookup_key() -> - pub fn eq_for_key(&self, other: &Self) -> bool { - self.text_thread == other.text_thread - } - - pub fn hash_for_key(&self, state: &mut H) { - self.text_thread.hash(state) - } - - pub fn title(&self, cx: &App) -> SharedString { - self.text_thread.read(cx).summary().or_default() - } - - fn load(self, cx: &App) -> Task> { - let title = self.title(cx); - let text = self.text_thread.read(cx).to_xml(cx); - let context = AgentContext::TextThread(TextThreadContext { - title, - text: text.into(), - handle: self, - }); - Task::ready(Some(context)) - } -} - -impl Display for TextThreadContext { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, " write!(f, "&")?, - '<' => write!(f, "<")?, - '>' => write!(f, ">")?, - '"' => write!(f, """)?, - '\'' => write!(f, "'")?, - _ => write!(f, "{}", c)?, - } - } - writeln!(f, "\">")?; - write!(f, "{}", self.text.trim())?; - write!(f, "\n") - } -} - -#[derive(Debug, Clone)] -pub struct RulesContextHandle { - pub prompt_id: UserPromptId, - pub context_id: ContextId, -} - -#[derive(Debug, Clone)] -pub struct RulesContext { - pub handle: RulesContextHandle, - pub title: Option, - pub text: SharedString, -} - -impl RulesContextHandle { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.prompt_id == other.prompt_id - } - - pub fn hash_for_key(&self, state: &mut H) { - self.prompt_id.hash(state) - } - - pub fn lookup_key(prompt_id: UserPromptId) -> AgentContextKey { - AgentContextKey(AgentContextHandle::Rules(RulesContextHandle { - prompt_id, - context_id: ContextId::for_lookup(), - })) - } - - pub fn load( - self, - prompt_store: &Option>, - cx: &App, - ) -> Task> { - let Some(prompt_store) = prompt_store.as_ref() else { - return Task::ready(None); - }; - let prompt_store = prompt_store.read(cx); - let prompt_id = self.prompt_id.into(); - let Some(metadata) = prompt_store.metadata(prompt_id) else { - return Task::ready(None); - }; - let title = metadata.title; - let text_task = prompt_store.load(prompt_id, cx); - cx.background_spawn(async move { - // TODO: report load errors instead of just logging - let text = text_task.await.log_err()?.into(); - let context = AgentContext::Rules(RulesContext { - handle: self, - title, - text, - }); - Some(context) - }) - } -} - -impl Display for RulesContext { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(title) = &self.title { - writeln!(f, "Rules title: {}", title)?; - } - let code_block = MarkdownCodeBlock { - tag: "", - text: self.text.trim(), - }; - write!(f, "{code_block}") - } -} - -#[derive(Debug, Clone)] -pub struct ImageContext { - pub project_path: Option, - pub full_path: Option, - pub original_image: Arc, - // TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml - // needed due to a false positive of `clippy::mutable_key_type`. 
- pub image_task: Shared>>, - pub context_id: ContextId, -} - -pub enum ImageStatus { - Loading, - Error, - Warning, - Ready, -} - -impl ImageContext { - pub fn eq_for_key(&self, other: &Self) -> bool { - self.original_image.id() == other.original_image.id() - } - - pub fn hash_for_key(&self, state: &mut H) { - self.original_image.id().hash(state); - } - - pub fn image(&self) -> Option { - self.image_task.clone().now_or_never().flatten() - } - - pub fn status(&self, model: Option<&Arc>) -> ImageStatus { - match self.image_task.clone().now_or_never() { - None => ImageStatus::Loading, - Some(None) => ImageStatus::Error, - Some(Some(_)) => { - if model.is_some_and(|model| !model.supports_images()) { - ImageStatus::Warning - } else { - ImageStatus::Ready - } - } - } - } - - pub fn load(self, cx: &App) -> Task> { - cx.background_spawn(async move { - self.image_task.clone().await; - Some(AgentContext::Image(self)) - }) - } -} +use crate::mention_set::MentionSet; #[derive(Debug, Clone, Default)] pub struct LoadedContext { @@ -792,382 +38,26 @@ impl LoadedContext { } /// Loads and formats a collection of contexts. -pub fn load_context( - contexts: Vec, - project: &Entity, - prompt_store: &Option>, - cx: &mut App, -) -> Task { - let load_tasks: Vec<_> = contexts - .into_iter() - .map(|context| match context { - AgentContextHandle::File(context) => context.load(cx), - AgentContextHandle::Directory(context) => context.load(project.clone(), cx), - AgentContextHandle::Symbol(context) => context.load(cx), - AgentContextHandle::Selection(context) => context.load(cx), - AgentContextHandle::FetchedUrl(context) => context.load(), - AgentContextHandle::Thread(context) => context.load(cx), - AgentContextHandle::TextThread(context) => context.load(cx), - AgentContextHandle::Rules(context) => context.load(prompt_store, cx), - AgentContextHandle::Image(context) => context.load(cx), - }) - .collect(); - +pub fn load_context(mention_set: &Entity, cx: &mut App) -> Task> { + let task = mention_set.update(cx, |mention_set, cx| mention_set.contents(true, cx)); cx.background_spawn(async move { - let load_results = future::join_all(load_tasks).await; - - let mut text = String::new(); - - let mut file_context = Vec::new(); - let mut directory_context = Vec::new(); - let mut symbol_context = Vec::new(); - let mut selection_context = Vec::new(); - let mut fetched_url_context = Vec::new(); - let mut thread_context = Vec::new(); - let mut text_thread_context = Vec::new(); - let mut rules_context = Vec::new(); - let mut images = Vec::new(); - for context in load_results.into_iter().flatten() { - match context { - AgentContext::File(context) => file_context.push(context), - AgentContext::Directory(context) => directory_context.push(context), - AgentContext::Symbol(context) => symbol_context.push(context), - AgentContext::Selection(context) => selection_context.push(context), - AgentContext::FetchedUrl(context) => fetched_url_context.push(context), - AgentContext::Thread(context) => thread_context.push(context), - AgentContext::TextThread(context) => text_thread_context.push(context), - AgentContext::Rules(context) => rules_context.push(context), - AgentContext::Image(context) => images.extend(context.image()), - } - } - - // Use empty text if there are no contexts that contribute to text (everything but image - // context). 
- if file_context.is_empty() - && directory_context.is_empty() - && symbol_context.is_empty() - && selection_context.is_empty() - && fetched_url_context.is_empty() - && thread_context.is_empty() - && text_thread_context.is_empty() - && rules_context.is_empty() - { - return LoadedContext { text, images }; - } - - text.push_str( - "\n\n\ - The following items were attached by the user. \ - They are up-to-date and don't need to be re-read.\n\n", - ); - - if !file_context.is_empty() { - text.push_str(""); - for context in file_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !directory_context.is_empty() { - text.push_str(""); - for context in directory_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !symbol_context.is_empty() { - text.push_str(""); - for context in symbol_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !selection_context.is_empty() { - text.push_str(""); - for context in selection_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !fetched_url_context.is_empty() { - text.push_str(""); - for context in fetched_url_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !thread_context.is_empty() { - text.push_str(""); - for context in thread_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - if !text_thread_context.is_empty() { - text.push_str(""); - for context in text_thread_context { - text.push('\n'); - let _ = writeln!(text, "{context}"); - } - text.push_str(""); - } - - if !rules_context.is_empty() { - text.push_str( - "\n\ - The user has specified the following rules that should be applied:\n", - ); - for context in rules_context { - text.push('\n'); - let _ = write!(text, "{context}"); - } - text.push_str("\n"); - } - - text.push_str("\n"); - - LoadedContext { text, images } + let mentions = task.await.log_err()?; + let mut loaded_context = LoadedContext::default(); + loaded_context + .text + .push_str("The following items were attached by the user.\n"); + for (_, (_, mention)) in mentions { + match mention { + Mention::Text { content, .. 
} => { + loaded_context.text.push_str(&content); + } + Mention::Image(mention_image) => loaded_context.images.push(LanguageModelImage { + source: mention_image.data, + ..LanguageModelImage::empty() + }), + Mention::Link => {} + } + } + Some(loaded_context) }) } - -fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec> { - let mut files = Vec::new(); - - for entry in worktree.child_entries(path) { - if entry.is_dir() { - files.extend(collect_files_in_path(worktree, &entry.path)); - } else if entry.is_file() { - files.push(entry.path.clone()); - } - } - - files -} - -fn codeblock_tag(full_path: &str, line_range: Option>) -> String { - let mut result = String::new(); - - if let Some(extension) = Path::new(full_path) - .extension() - .and_then(|ext| ext.to_str()) - { - let _ = write!(result, "{} ", extension); - } - - let _ = write!(result, "{}", full_path); - - if let Some(range) = line_range { - if range.start.row == range.end.row { - let _ = write!(result, ":{}", range.start.row + 1); - } else { - let _ = write!(result, ":{}-{}", range.start.row + 1, range.end.row + 1); - } - } - - result -} - -/// Wraps `AgentContext` to opt-in to `PartialEq` and `Hash` impls which use a subset of fields -/// needed for stable context identity. -#[derive(Debug, Clone, RefCast)] -#[repr(transparent)] -pub struct AgentContextKey(pub AgentContextHandle); - -impl AsRef for AgentContextKey { - fn as_ref(&self) -> &AgentContextHandle { - &self.0 - } -} - -impl Eq for AgentContextKey {} - -impl PartialEq for AgentContextKey { - fn eq(&self, other: &Self) -> bool { - match &self.0 { - AgentContextHandle::File(context) => { - if let AgentContextHandle::File(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Directory(context) => { - if let AgentContextHandle::Directory(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Symbol(context) => { - if let AgentContextHandle::Symbol(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Selection(context) => { - if let AgentContextHandle::Selection(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::FetchedUrl(context) => { - if let AgentContextHandle::FetchedUrl(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Thread(context) => { - if let AgentContextHandle::Thread(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Rules(context) => { - if let AgentContextHandle::Rules(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::Image(context) => { - if let AgentContextHandle::Image(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - AgentContextHandle::TextThread(context) => { - if let AgentContextHandle::TextThread(other_context) = &other.0 { - return context.eq_for_key(other_context); - } - } - } - false - } -} - -impl Hash for AgentContextKey { - fn hash(&self, state: &mut H) { - match &self.0 { - AgentContextHandle::File(context) => context.hash_for_key(state), - AgentContextHandle::Directory(context) => context.hash_for_key(state), - AgentContextHandle::Symbol(context) => context.hash_for_key(state), - AgentContextHandle::Selection(context) => context.hash_for_key(state), - AgentContextHandle::FetchedUrl(context) => context.hash_for_key(state), - AgentContextHandle::Thread(context) 
=> context.hash_for_key(state), - AgentContextHandle::TextThread(context) => context.hash_for_key(state), - AgentContextHandle::Rules(context) => context.hash_for_key(state), - AgentContextHandle::Image(context) => context.hash_for_key(state), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::TestAppContext; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - - fn init_test_settings(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); - } - - // Helper to create a test project with test files - async fn create_test_project( - cx: &mut TestAppContext, - files: serde_json::Value, - ) -> Entity { - let fs = FakeFs::new(cx.background_executor.clone()); - fs.insert_tree(path!("/test"), files).await; - Project::test(fs, [path!("/test").as_ref()], cx).await - } - - #[gpui::test] - async fn test_large_file_uses_outline(cx: &mut TestAppContext) { - init_test_settings(cx); - - // Create a large file that exceeds AUTO_OUTLINE_SIZE - const LINE: &str = "Line with some text\n"; - let large_content = LINE.repeat(2 * (outline::AUTO_OUTLINE_SIZE / LINE.len())); - let content_len = large_content.len(); - - assert!(content_len > outline::AUTO_OUTLINE_SIZE); - - let file_context = load_context_for("file.txt", large_content, cx).await; - - assert!( - file_context - .text - .contains(&format!("# File outline for {}", path!("test/file.txt"))), - "Large files should not get an outline" - ); - - assert!( - file_context.text.len() < content_len, - "Outline should be smaller than original content" - ); - } - - #[gpui::test] - async fn test_small_file_uses_full_content(cx: &mut TestAppContext) { - init_test_settings(cx); - - let small_content = "This is a small file.\n"; - let content_len = small_content.len(); - - assert!(content_len < outline::AUTO_OUTLINE_SIZE); - - let file_context = load_context_for("file.txt", small_content.to_string(), cx).await; - - assert!( - !file_context - .text - .contains(&format!("# File outline for {}", path!("test/file.txt"))), - "Small files should not get an outline" - ); - - assert!( - file_context.text.contains(small_content), - "Small files should use full content" - ); - } - - async fn load_context_for( - filename: &str, - content: String, - cx: &mut TestAppContext, - ) -> LoadedContext { - // Create a test project with the file - let project = create_test_project( - cx, - json!({ - filename: content, - }), - ) - .await; - - // Open the buffer - let buffer_path = project - .read_with(cx, |project, cx| project.find_project_path(filename, cx)) - .unwrap(); - - let buffer = project - .update(cx, |project, cx| project.open_buffer(buffer_path, cx)) - .await - .unwrap(); - - let context_handle = AgentContextHandle::File(FileContextHandle { - buffer: buffer.clone(), - context_id: ContextId::zero(), - }); - - cx.update(|cx| load_context(vec![context_handle], &project, &None, cx)) - .await - } -} diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs deleted file mode 100644 index 0a6e811673aa47339087e538003e87b1940d0039..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker.rs +++ /dev/null @@ -1,931 +0,0 @@ -mod completion_provider; -pub(crate) mod fetch_context_picker; -pub(crate) mod file_context_picker; -pub(crate) mod rules_context_picker; -pub(crate) mod symbol_context_picker; -pub(crate) mod thread_context_picker; - -use std::ops::Range; -use std::path::PathBuf; 
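
The `AgentContextKey` `PartialEq`/`Hash` impls deleted above define identity over a subset of fields (e.g. a fetched URL's `url`, never its `context_id`), so a hash set of keys dedupes repeated attachments of the same item. A standalone sketch of that pattern, with illustrative local names (`Key`, `FetchedUrl` are not the crate's types):

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

#[derive(Debug, Clone)]
struct FetchedUrl {
    url: String,
    context_id: u64, // per-insertion UI id, deliberately ignored for identity
}

#[derive(Debug)]
struct Key(FetchedUrl);

impl PartialEq for Key {
    fn eq(&self, other: &Self) -> bool {
        self.0.url == other.0.url
    }
}

impl Eq for Key {}

impl Hash for Key {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.url.hash(state);
    }
}

fn main() {
    let mut attached = HashSet::new();
    attached.insert(Key(FetchedUrl { url: "https://example.com".into(), context_id: 0 }));
    // Same URL with a different context_id hashes and compares equal, so it is rejected.
    let was_added = attached.insert(Key(FetchedUrl { url: "https://example.com".into(), context_id: 1 }));
    assert!(!was_added);
    assert_eq!(attached.len(), 1);
    println!("kept context_id {}", attached.iter().next().unwrap().0.context_id);
}
```
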
-use std::sync::Arc; - -use agent::{HistoryEntry, HistoryEntryId, HistoryStore}; -use agent_client_protocol as acp; -use anyhow::{Result, anyhow}; -use collections::HashSet; -pub use completion_provider::ContextPickerCompletionProvider; -use editor::display_map::{Crease, CreaseId, CreaseMetadata, FoldId}; -use editor::{Anchor, Editor, ExcerptId, FoldPlaceholder, ToOffset}; -use fetch_context_picker::FetchContextPicker; -use file_context_picker::FileContextPicker; -use file_context_picker::render_file_context_entry; -use gpui::{ - App, DismissEvent, Empty, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, - WeakEntity, -}; -use language::Buffer; -use multi_buffer::MultiBufferRow; -use project::ProjectPath; -use prompt_store::PromptStore; -use rules_context_picker::{RulesContextEntry, RulesContextPicker}; -use symbol_context_picker::SymbolContextPicker; -use thread_context_picker::render_thread_context_entry; -use ui::{ - ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor, prelude::*, -}; -use util::paths::PathStyle; -use util::rel_path::RelPath; -use workspace::{Workspace, notifications::NotifyResultExt}; - -use crate::context_picker::thread_context_picker::ThreadContextPicker; -use crate::{context::RULES_ICON, context_store::ContextStore}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum ContextPickerEntry { - Mode(ContextPickerMode), - Action(ContextPickerAction), -} - -impl ContextPickerEntry { - pub fn keyword(&self) -> &'static str { - match self { - Self::Mode(mode) => mode.keyword(), - Self::Action(action) => action.keyword(), - } - } - - pub fn label(&self) -> &'static str { - match self { - Self::Mode(mode) => mode.label(), - Self::Action(action) => action.label(), - } - } - - pub fn icon(&self) -> IconName { - match self { - Self::Mode(mode) => mode.icon(), - Self::Action(action) => action.icon(), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum ContextPickerMode { - File, - Symbol, - Fetch, - Thread, - Rules, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum ContextPickerAction { - AddSelections, -} - -impl ContextPickerAction { - pub fn keyword(&self) -> &'static str { - match self { - Self::AddSelections => "selection", - } - } - - pub fn label(&self) -> &'static str { - match self { - Self::AddSelections => "Selection", - } - } - - pub fn icon(&self) -> IconName { - match self { - Self::AddSelections => IconName::Reader, - } - } -} - -impl TryFrom<&str> for ContextPickerMode { - type Error = String; - - fn try_from(value: &str) -> Result { - match value { - "file" => Ok(Self::File), - "symbol" => Ok(Self::Symbol), - "fetch" => Ok(Self::Fetch), - "thread" => Ok(Self::Thread), - "rule" => Ok(Self::Rules), - _ => Err(format!("Invalid context picker mode: {}", value)), - } - } -} - -impl ContextPickerMode { - pub fn keyword(&self) -> &'static str { - match self { - Self::File => "file", - Self::Symbol => "symbol", - Self::Fetch => "fetch", - Self::Thread => "thread", - Self::Rules => "rule", - } - } - - pub fn label(&self) -> &'static str { - match self { - Self::File => "Files & Directories", - Self::Symbol => "Symbols", - Self::Fetch => "Fetch", - Self::Thread => "Threads", - Self::Rules => "Rules", - } - } - - pub fn icon(&self) -> IconName { - match self { - Self::File => IconName::File, - Self::Symbol => IconName::Code, - Self::Fetch => IconName::ToolWeb, - Self::Thread => IconName::Thread, - Self::Rules => RULES_ICON, - } - } -} - -#[derive(Debug, Clone)] -enum 
ContextPickerState { - Default(Entity), - File(Entity), - Symbol(Entity), - Fetch(Entity), - Thread(Entity), - Rules(Entity), -} - -pub(super) struct ContextPicker { - mode: ContextPickerState, - workspace: WeakEntity, - context_store: WeakEntity, - thread_store: Option>, - prompt_store: Option>, - _subscriptions: Vec, -} - -impl ContextPicker { - pub fn new( - workspace: WeakEntity, - thread_store: Option>, - prompt_store: Option>, - context_store: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let subscriptions = context_store - .upgrade() - .map(|context_store| { - cx.observe(&context_store, |this, _, cx| this.notify_current_picker(cx)) - }) - .into_iter() - .chain( - thread_store - .as_ref() - .and_then(|thread_store| thread_store.upgrade()) - .map(|thread_store| { - cx.observe(&thread_store, |this, _, cx| this.notify_current_picker(cx)) - }), - ) - .collect::>(); - - ContextPicker { - mode: ContextPickerState::Default(ContextMenu::build( - window, - cx, - |menu, _window, _cx| menu, - )), - workspace, - context_store, - thread_store, - prompt_store, - _subscriptions: subscriptions, - } - } - - pub fn init(&mut self, window: &mut Window, cx: &mut Context) { - self.mode = ContextPickerState::Default(self.build_menu(window, cx)); - cx.notify(); - } - - fn build_menu(&mut self, window: &mut Window, cx: &mut Context) -> Entity { - let context_picker = cx.entity(); - - let menu = ContextMenu::build(window, cx, move |menu, _window, cx| { - let Some(workspace) = self.workspace.upgrade() else { - return menu; - }; - let path_style = workspace.read(cx).path_style(cx); - let recent = self.recent_entries(cx); - let has_recent = !recent.is_empty(); - let recent_entries = recent - .into_iter() - .enumerate() - .map(|(ix, entry)| { - self.recent_menu_item(context_picker.clone(), ix, entry, path_style) - }) - .collect::>(); - - let entries = self - .workspace - .upgrade() - .map(|workspace| { - available_context_picker_entries( - &self.prompt_store, - &self.thread_store, - &workspace, - cx, - ) - }) - .unwrap_or_default(); - - menu.when(has_recent, |menu| { - menu.custom_row(|_, _| { - div() - .mb_1() - .child( - Label::new("Recent") - .color(Color::Muted) - .size(LabelSize::Small), - ) - .into_any_element() - }) - }) - .extend(recent_entries) - .when(has_recent, |menu| menu.separator()) - .extend(entries.into_iter().map(|entry| { - let context_picker = context_picker.clone(); - - ContextMenuEntry::new(entry.label()) - .icon(entry.icon()) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .handler(move |window, cx| { - context_picker.update(cx, |this, cx| this.select_entry(entry, window, cx)) - }) - })) - .keep_open_on_confirm(true) - }); - - cx.subscribe(&menu, move |_, _, _: &DismissEvent, cx| { - cx.emit(DismissEvent); - }) - .detach(); - - menu - } - - /// Whether threads are allowed as context. 
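
The `ContextPickerMode` shown above pairs each picker mode with the keyword typed after `@` (`keyword()`), and `TryFrom<&str>` parses it back when the user confirms a completion. A standalone sketch of that round-trip using a hypothetical `DemoMode`, not the crate's enum:

```rust
/// Illustrative keyword <-> mode mapping, mirroring the deleted
/// `ContextPickerMode::keyword` / `TryFrom<&str>` pair above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum DemoMode {
    File,
    Symbol,
    Fetch,
    Thread,
    Rules,
}

impl DemoMode {
    fn keyword(self) -> &'static str {
        match self {
            Self::File => "file",
            Self::Symbol => "symbol",
            Self::Fetch => "fetch",
            Self::Thread => "thread",
            Self::Rules => "rule",
        }
    }

    fn parse(keyword: &str) -> Option<Self> {
        [Self::File, Self::Symbol, Self::Fetch, Self::Thread, Self::Rules]
            .into_iter()
            .find(|mode| mode.keyword() == keyword)
    }
}

fn main() {
    // "@rule my-rule" -> the token after '@' selects the picker mode.
    assert_eq!(DemoMode::parse("rule"), Some(DemoMode::Rules));
    assert_eq!(DemoMode::parse("unknown"), None);
}
```
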
- pub fn allow_threads(&self) -> bool { - self.thread_store.is_some() - } - - fn select_entry( - &mut self, - entry: ContextPickerEntry, - window: &mut Window, - cx: &mut Context, - ) { - let context_picker = cx.entity().downgrade(); - - match entry { - ContextPickerEntry::Mode(mode) => match mode { - ContextPickerMode::File => { - self.mode = ContextPickerState::File(cx.new(|cx| { - FileContextPicker::new( - context_picker.clone(), - self.workspace.clone(), - self.context_store.clone(), - window, - cx, - ) - })); - } - ContextPickerMode::Symbol => { - self.mode = ContextPickerState::Symbol(cx.new(|cx| { - SymbolContextPicker::new( - context_picker.clone(), - self.workspace.clone(), - self.context_store.clone(), - window, - cx, - ) - })); - } - ContextPickerMode::Rules => { - if let Some(prompt_store) = self.prompt_store.as_ref() { - self.mode = ContextPickerState::Rules(cx.new(|cx| { - RulesContextPicker::new( - prompt_store.clone(), - context_picker.clone(), - self.context_store.clone(), - window, - cx, - ) - })); - } - } - ContextPickerMode::Fetch => { - self.mode = ContextPickerState::Fetch(cx.new(|cx| { - FetchContextPicker::new( - context_picker.clone(), - self.workspace.clone(), - self.context_store.clone(), - window, - cx, - ) - })); - } - ContextPickerMode::Thread => { - if let Some(thread_store) = self.thread_store.clone() { - self.mode = ContextPickerState::Thread(cx.new(|cx| { - ThreadContextPicker::new( - thread_store, - context_picker.clone(), - self.context_store.clone(), - self.workspace.clone(), - window, - cx, - ) - })); - } - } - }, - ContextPickerEntry::Action(action) => match action { - ContextPickerAction::AddSelections => { - if let Some((context_store, workspace)) = - self.context_store.upgrade().zip(self.workspace.upgrade()) - { - add_selections_as_context(&context_store, &workspace, cx); - } - - cx.emit(DismissEvent); - } - }, - } - - cx.notify(); - cx.focus_self(window); - } - - pub fn select_first(&mut self, window: &mut Window, cx: &mut Context) { - // Other variants already select their first entry on open automatically - if let ContextPickerState::Default(entity) = &self.mode { - entity.update(cx, |entity, cx| { - entity.select_first(&Default::default(), window, cx) - }) - } - } - - fn recent_menu_item( - &self, - context_picker: Entity, - ix: usize, - entry: RecentEntry, - path_style: PathStyle, - ) -> ContextMenuItem { - match entry { - RecentEntry::File { - project_path, - path_prefix, - } => { - let context_store = self.context_store.clone(); - let worktree_id = project_path.worktree_id; - let path = project_path.path.clone(); - - ContextMenuItem::custom_entry( - move |_window, cx| { - render_file_context_entry( - ElementId::named_usize("ctx-recent", ix), - worktree_id, - &path, - &path_prefix, - false, - path_style, - context_store.clone(), - cx, - ) - .into_any() - }, - move |window, cx| { - context_picker.update(cx, |this, cx| { - this.add_recent_file(project_path.clone(), window, cx); - }) - }, - None, - ) - } - RecentEntry::Thread(thread) => { - let context_store = self.context_store.clone(); - let view_thread = thread.clone(); - - ContextMenuItem::custom_entry( - move |_window, cx| { - render_thread_context_entry(&view_thread, context_store.clone(), cx) - .into_any() - }, - move |window, cx| { - context_picker.update(cx, |this, cx| { - this.add_recent_thread(thread.clone(), window, cx) - .detach_and_log_err(cx); - }) - }, - None, - ) - } - } - } - - fn add_recent_file( - &self, - project_path: ProjectPath, - window: &mut Window, - cx: &mut Context, 
- ) { - let Some(context_store) = self.context_store.upgrade() else { - return; - }; - - let task = context_store.update(cx, |context_store, cx| { - context_store.add_file_from_path(project_path.clone(), true, cx) - }); - - cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx)) - .detach(); - - cx.notify(); - } - - fn add_recent_thread( - &self, - entry: HistoryEntry, - _window: &mut Window, - cx: &mut Context, - ) -> Task> { - let Some(context_store) = self.context_store.upgrade() else { - return Task::ready(Err(anyhow!("context store not available"))); - }; - let Some(project) = self - .workspace - .upgrade() - .map(|workspace| workspace.read(cx).project().clone()) - else { - return Task::ready(Err(anyhow!("project not available"))); - }; - - match entry { - HistoryEntry::AcpThread(thread) => { - let Some(thread_store) = self - .thread_store - .as_ref() - .and_then(|thread_store| thread_store.upgrade()) - else { - return Task::ready(Err(anyhow!("thread store not available"))); - }; - let load_thread_task = - agent::load_agent_thread(thread.id, thread_store, project, cx); - cx.spawn(async move |this, cx| { - let thread = load_thread_task.await?; - context_store.update(cx, |context_store, cx| { - context_store.add_thread(thread, true, cx); - })?; - this.update(cx, |_this, cx| cx.notify()) - }) - } - HistoryEntry::TextThread(thread) => { - let Some(thread_store) = self - .thread_store - .as_ref() - .and_then(|thread_store| thread_store.upgrade()) - else { - return Task::ready(Err(anyhow!("text thread store not available"))); - }; - - let task = thread_store.update(cx, |this, cx| { - this.load_text_thread(thread.path.clone(), cx) - }); - cx.spawn(async move |this, cx| { - let thread = task.await?; - context_store.update(cx, |context_store, cx| { - context_store.add_text_thread(thread, true, cx); - })?; - this.update(cx, |_this, cx| cx.notify()) - }) - } - } - } - - fn recent_entries(&self, cx: &mut App) -> Vec { - let Some(workspace) = self.workspace.upgrade() else { - return vec![]; - }; - - let Some(context_store) = self.context_store.upgrade() else { - return vec![]; - }; - - recent_context_picker_entries_with_store( - context_store, - self.thread_store.clone(), - workspace, - None, - cx, - ) - } - - fn notify_current_picker(&mut self, cx: &mut Context) { - match &self.mode { - ContextPickerState::Default(entity) => entity.update(cx, |_, cx| cx.notify()), - ContextPickerState::File(entity) => entity.update(cx, |_, cx| cx.notify()), - ContextPickerState::Symbol(entity) => entity.update(cx, |_, cx| cx.notify()), - ContextPickerState::Fetch(entity) => entity.update(cx, |_, cx| cx.notify()), - ContextPickerState::Thread(entity) => entity.update(cx, |_, cx| cx.notify()), - ContextPickerState::Rules(entity) => entity.update(cx, |_, cx| cx.notify()), - } - } -} - -impl EventEmitter for ContextPicker {} - -impl Focusable for ContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - match &self.mode { - ContextPickerState::Default(menu) => menu.focus_handle(cx), - ContextPickerState::File(file_picker) => file_picker.focus_handle(cx), - ContextPickerState::Symbol(symbol_picker) => symbol_picker.focus_handle(cx), - ContextPickerState::Fetch(fetch_picker) => fetch_picker.focus_handle(cx), - ContextPickerState::Thread(thread_picker) => thread_picker.focus_handle(cx), - ContextPickerState::Rules(user_rules_picker) => user_rules_picker.focus_handle(cx), - } - } -} - -impl Render for ContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl 
IntoElement { - v_flex() - .w(px(400.)) - .min_w(px(400.)) - .map(|parent| match &self.mode { - ContextPickerState::Default(menu) => parent.child(menu.clone()), - ContextPickerState::File(file_picker) => parent.child(file_picker.clone()), - ContextPickerState::Symbol(symbol_picker) => parent.child(symbol_picker.clone()), - ContextPickerState::Fetch(fetch_picker) => parent.child(fetch_picker.clone()), - ContextPickerState::Thread(thread_picker) => parent.child(thread_picker.clone()), - ContextPickerState::Rules(user_rules_picker) => { - parent.child(user_rules_picker.clone()) - } - }) - } -} - -pub(crate) enum RecentEntry { - File { - project_path: ProjectPath, - path_prefix: Arc, - }, - Thread(HistoryEntry), -} - -pub(crate) fn available_context_picker_entries( - prompt_store: &Option>, - thread_store: &Option>, - workspace: &Entity, - cx: &mut App, -) -> Vec { - let mut entries = vec![ - ContextPickerEntry::Mode(ContextPickerMode::File), - ContextPickerEntry::Mode(ContextPickerMode::Symbol), - ]; - - let has_selection = workspace - .read(cx) - .active_item(cx) - .and_then(|item| item.downcast::()) - .is_some_and(|editor| { - editor.update(cx, |editor, cx| { - editor.has_non_empty_selection(&editor.display_snapshot(cx)) - }) - }); - if has_selection { - entries.push(ContextPickerEntry::Action( - ContextPickerAction::AddSelections, - )); - } - - if thread_store.is_some() { - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Thread)); - } - - if prompt_store.is_some() { - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules)); - } - - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch)); - - entries -} - -fn recent_context_picker_entries_with_store( - context_store: Entity, - thread_store: Option>, - workspace: Entity, - exclude_path: Option, - cx: &App, -) -> Vec { - let project = workspace.read(cx).project(); - - let mut exclude_paths = context_store.read(cx).file_paths(cx); - exclude_paths.extend(exclude_path); - - let exclude_paths = exclude_paths - .into_iter() - .filter_map(|project_path| project.read(cx).absolute_path(&project_path, cx)) - .collect(); - - let exclude_threads = context_store.read(cx).thread_ids(); - - recent_context_picker_entries(thread_store, workspace, &exclude_paths, exclude_threads, cx) -} - -pub(crate) fn recent_context_picker_entries( - thread_store: Option>, - workspace: Entity, - exclude_paths: &HashSet, - exclude_threads: &HashSet, - cx: &App, -) -> Vec { - let mut recent = Vec::with_capacity(6); - let workspace = workspace.read(cx); - let project = workspace.project().read(cx); - let include_root_name = workspace.visible_worktrees(cx).count() > 1; - - recent.extend( - workspace - .recent_navigation_history_iter(cx) - .filter(|(_, abs_path)| { - abs_path - .as_ref() - .is_none_or(|path| !exclude_paths.contains(path.as_path())) - }) - .take(4) - .filter_map(|(project_path, _)| { - project - .worktree_for_id(project_path.worktree_id, cx) - .map(|worktree| { - let path_prefix = if include_root_name { - worktree.read(cx).root_name().into() - } else { - RelPath::empty().into() - }; - RecentEntry::File { - project_path, - path_prefix, - } - }) - }), - ); - - if let Some(thread_store) = thread_store.and_then(|store| store.upgrade()) { - const RECENT_THREADS_COUNT: usize = 2; - recent.extend( - thread_store - .read(cx) - .recently_opened_entries(cx) - .iter() - .filter(|e| match e.id() { - HistoryEntryId::AcpThread(session_id) => !exclude_threads.contains(&session_id), - HistoryEntryId::TextThread(path) => { - 
!exclude_paths.contains(&path.to_path_buf()) - } - }) - .take(RECENT_THREADS_COUNT) - .map(|thread| RecentEntry::Thread(thread.clone())), - ); - } - - recent -} - -fn add_selections_as_context( - context_store: &Entity, - workspace: &Entity, - cx: &mut App, -) { - let selection_ranges = selection_ranges(workspace, cx); - context_store.update(cx, |context_store, cx| { - for (buffer, range) in selection_ranges { - context_store.add_selection(buffer, range, cx); - } - }) -} - -pub(crate) fn selection_ranges( - workspace: &Entity, - cx: &mut App, -) -> Vec<(Entity, Range)> { - let Some(editor) = workspace - .read(cx) - .active_item(cx) - .and_then(|item| item.act_as::(cx)) - else { - return Vec::new(); - }; - - editor.update(cx, |editor, cx| { - let selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); - - let buffer = editor.buffer().clone().read(cx); - let snapshot = buffer.snapshot(cx); - - selections - .into_iter() - .map(|s| snapshot.anchor_after(s.start)..snapshot.anchor_before(s.end)) - .flat_map(|range| { - let (start_buffer, start) = buffer.text_anchor_for_position(range.start, cx)?; - let (end_buffer, end) = buffer.text_anchor_for_position(range.end, cx)?; - if start_buffer != end_buffer { - return None; - } - Some((start_buffer, start..end)) - }) - .collect::>() - }) -} - -pub(crate) fn insert_crease_for_mention( - excerpt_id: ExcerptId, - crease_start: text::Anchor, - content_len: usize, - crease_label: SharedString, - crease_icon_path: SharedString, - editor_entity: Entity, - window: &mut Window, - cx: &mut App, -) -> Option { - editor_entity.update(cx, |editor, cx| { - let snapshot = editor.buffer().read(cx).snapshot(cx); - - let start = snapshot.anchor_in_excerpt(excerpt_id, crease_start)?; - - let start = start.bias_right(&snapshot); - let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); - - let crease = crease_for_mention( - crease_label, - crease_icon_path, - start..end, - editor_entity.downgrade(), - ); - - let ids = editor.insert_creases(vec![crease.clone()], cx); - editor.fold_creases(vec![crease], false, window, cx); - - Some(ids[0]) - }) -} - -pub fn crease_for_mention( - label: SharedString, - icon_path: SharedString, - range: Range, - editor_entity: WeakEntity, -) -> Crease { - let placeholder = FoldPlaceholder { - render: render_fold_icon_button(icon_path.clone(), label.clone(), editor_entity), - merge_adjacent: false, - ..Default::default() - }; - - let render_trailer = move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any(); - - Crease::inline(range, placeholder, fold_toggle("mention"), render_trailer) - .with_metadata(CreaseMetadata { icon_path, label }) -} - -fn render_fold_icon_button( - icon_path: SharedString, - label: SharedString, - editor: WeakEntity, -) -> Arc, &mut App) -> AnyElement> { - Arc::new({ - move |fold_id, fold_range, cx| { - let is_in_text_selection = editor - .update(cx, |editor, cx| editor.is_range_selected(&fold_range, cx)) - .unwrap_or_default(); - - ButtonLike::new(fold_id) - .style(ButtonStyle::Filled) - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .toggle_state(is_in_text_selection) - .child( - h_flex() - .gap_1() - .child( - Icon::from_path(icon_path.clone()) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(label.clone()) - .size(LabelSize::Small) - .buffer_font(cx) - .single_line(), - ), - ) - .into_any_element() - } - }) -} - -fn fold_toggle( - name: &'static str, -) -> impl Fn( - MultiBufferRow, - bool, - Arc, - &mut Window, - &mut 
App, -) -> AnyElement { - move |row, is_folded, fold, _window, _cx| { - Disclosure::new((name, row.0 as u64), !is_folded) - .toggle_state(is_folded) - .on_click(move |_e, window, cx| fold(!is_folded, window, cx)) - .into_any_element() - } -} - -pub struct MentionLink; - -impl MentionLink { - const FILE: &str = "@file"; - const SYMBOL: &str = "@symbol"; - const SELECTION: &str = "@selection"; - const THREAD: &str = "@thread"; - const FETCH: &str = "@fetch"; - const RULE: &str = "@rule"; - - const TEXT_THREAD_URL_PREFIX: &str = "text-thread://"; - - pub fn for_file(file_name: &str, full_path: &str) -> String { - format!("[@{}]({}:{})", file_name, Self::FILE, full_path) - } - - pub fn for_symbol(symbol_name: &str, full_path: &str) -> String { - format!( - "[@{}]({}:{}:{})", - symbol_name, - Self::SYMBOL, - full_path, - symbol_name - ) - } - - pub fn for_selection(file_name: &str, full_path: &str, line_range: Range) -> String { - format!( - "[@{} ({}-{})]({}:{}:{}-{})", - file_name, - line_range.start + 1, - line_range.end + 1, - Self::SELECTION, - full_path, - line_range.start, - line_range.end - ) - } - - pub fn for_thread(thread: &HistoryEntry) -> String { - match thread { - HistoryEntry::AcpThread(thread) => { - format!("[@{}]({}:{})", thread.title, Self::THREAD, thread.id) - } - HistoryEntry::TextThread(thread) => { - let filename = thread - .path - .file_name() - .unwrap_or_default() - .to_string_lossy(); - let escaped_filename = urlencoding::encode(&filename); - format!( - "[@{}]({}:{}{})", - thread.title, - Self::THREAD, - Self::TEXT_THREAD_URL_PREFIX, - escaped_filename - ) - } - } - } - - pub fn for_fetch(url: &str) -> String { - format!("[@{}]({}:{})", url, Self::FETCH, url) - } - - pub fn for_rule(rule: &RulesContextEntry) -> String { - format!("[@{}]({}:{})", rule.title, Self::RULE, rule.prompt_id.0) - } -} diff --git a/crates/agent_ui/src/context_picker/completion_provider.rs b/crates/agent_ui/src/context_picker/completion_provider.rs deleted file mode 100644 index 1fa128cde82dba900136ad6d136aad858512f169..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/completion_provider.rs +++ /dev/null @@ -1,1687 +0,0 @@ -use std::ops::Range; -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use agent::{HistoryEntry, HistoryStore}; -use anyhow::Result; -use editor::{CompletionProvider, Editor, ExcerptId, ToOffset as _}; -use file_icons::FileIcons; -use fuzzy::{StringMatch, StringMatchCandidate}; -use gpui::{App, Entity, Task, WeakEntity}; -use http_client::HttpClientWithUrl; -use itertools::Itertools; -use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId}; -use lsp::CompletionContext; -use project::lsp_store::SymbolLocation; -use project::{ - Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project, - ProjectPath, Symbol, WorktreeId, -}; -use prompt_store::PromptStore; -use rope::Point; -use text::{Anchor, OffsetRangeExt, ToPoint}; -use ui::prelude::*; -use util::ResultExt as _; -use util::paths::PathStyle; -use util::rel_path::RelPath; -use workspace::Workspace; - -use crate::{ - context::{AgentContextHandle, AgentContextKey, RULES_ICON}, - context_store::ContextStore, -}; - -use super::fetch_context_picker::fetch_url_content; -use super::file_context_picker::{FileMatch, search_files}; -use super::rules_context_picker::{RulesContextEntry, search_rules}; -use super::symbol_context_picker::SymbolMatch; -use super::symbol_context_picker::search_symbols; -use 
super::thread_context_picker::search_threads; -use super::{ - ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry, - available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges, -}; -use crate::inline_prompt_editor::ContextCreasesAddon; - -pub(crate) enum Match { - File(FileMatch), - Symbol(SymbolMatch), - Thread(HistoryEntry), - RecentThread(HistoryEntry), - Fetch(SharedString), - Rules(RulesContextEntry), - Entry(EntryMatch), -} - -pub struct EntryMatch { - mat: Option, - entry: ContextPickerEntry, -} - -impl Match { - pub fn score(&self) -> f64 { - match self { - Match::File(file) => file.mat.score, - Match::Entry(mode) => mode.mat.as_ref().map(|mat| mat.score).unwrap_or(1.), - Match::Thread(_) => 1., - Match::RecentThread(_) => 1., - Match::Symbol(_) => 1., - Match::Fetch(_) => 1., - Match::Rules(_) => 1., - } - } -} - -fn search( - mode: Option, - query: String, - cancellation_flag: Arc, - recent_entries: Vec, - prompt_store: Option>, - thread_store: Option>, - workspace: Entity, - cx: &mut App, -) -> Task> { - match mode { - Some(ContextPickerMode::File) => { - let search_files_task = search_files(query, cancellation_flag, &workspace, cx); - cx.background_spawn(async move { - search_files_task - .await - .into_iter() - .map(Match::File) - .collect() - }) - } - - Some(ContextPickerMode::Symbol) => { - let search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx); - cx.background_spawn(async move { - search_symbols_task - .await - .into_iter() - .map(Match::Symbol) - .collect() - }) - } - - Some(ContextPickerMode::Thread) => { - if let Some(thread_store) = thread_store.as_ref().and_then(|t| t.upgrade()) { - let search_threads_task = - search_threads(query, cancellation_flag, &thread_store, cx); - cx.background_spawn(async move { - search_threads_task - .await - .into_iter() - .map(Match::Thread) - .collect() - }) - } else { - Task::ready(Vec::new()) - } - } - - Some(ContextPickerMode::Fetch) => { - if !query.is_empty() { - Task::ready(vec![Match::Fetch(query.into())]) - } else { - Task::ready(Vec::new()) - } - } - - Some(ContextPickerMode::Rules) => { - if let Some(prompt_store) = prompt_store.as_ref().and_then(|p| p.upgrade()) { - let search_rules_task = search_rules(query, cancellation_flag, &prompt_store, cx); - cx.background_spawn(async move { - search_rules_task - .await - .into_iter() - .map(Match::Rules) - .collect::>() - }) - } else { - Task::ready(Vec::new()) - } - } - - None => { - if query.is_empty() { - let mut matches = recent_entries - .into_iter() - .map(|entry| match entry { - super::RecentEntry::File { - project_path, - path_prefix, - } => Match::File(FileMatch { - mat: fuzzy::PathMatch { - score: 1., - positions: Vec::new(), - worktree_id: project_path.worktree_id.to_usize(), - path: project_path.path, - path_prefix, - is_dir: false, - distance_to_relative_ancestor: 0, - }, - is_recent: true, - }), - super::RecentEntry::Thread(entry) => Match::RecentThread(entry), - }) - .collect::>(); - - matches.extend( - available_context_picker_entries(&prompt_store, &thread_store, &workspace, cx) - .into_iter() - .map(|mode| { - Match::Entry(EntryMatch { - entry: mode, - mat: None, - }) - }), - ); - - Task::ready(matches) - } else { - let executor = cx.background_executor().clone(); - - let search_files_task = - search_files(query.clone(), cancellation_flag, &workspace, cx); - - let entries = - available_context_picker_entries(&prompt_store, &thread_store, &workspace, cx); - let 
entry_candidates = entries - .iter() - .enumerate() - .map(|(ix, entry)| StringMatchCandidate::new(ix, entry.keyword())) - .collect::>(); - - cx.background_spawn(async move { - let mut matches = search_files_task - .await - .into_iter() - .map(Match::File) - .collect::>(); - - let entry_matches = fuzzy::match_strings( - &entry_candidates, - &query, - false, - true, - 100, - &Arc::new(AtomicBool::default()), - executor, - ) - .await; - - matches.extend(entry_matches.into_iter().map(|mat| { - Match::Entry(EntryMatch { - entry: entries[mat.candidate_id], - mat: Some(mat), - }) - })); - - matches.sort_by(|a, b| { - b.score() - .partial_cmp(&a.score()) - .unwrap_or(std::cmp::Ordering::Equal) - }); - - matches - }) - } - } - } -} - -pub struct ContextPickerCompletionProvider { - workspace: WeakEntity, - context_store: WeakEntity, - thread_store: Option>, - prompt_store: Option>, - editor: WeakEntity, - excluded_buffer: Option>, -} - -impl ContextPickerCompletionProvider { - pub fn new( - workspace: WeakEntity, - context_store: WeakEntity, - thread_store: Option>, - prompt_store: Option>, - editor: WeakEntity, - exclude_buffer: Option>, - ) -> Self { - Self { - workspace, - context_store, - thread_store, - prompt_store, - editor, - excluded_buffer: exclude_buffer, - } - } - - fn completion_for_entry( - entry: ContextPickerEntry, - excerpt_id: ExcerptId, - source_range: Range, - editor: Entity, - context_store: Entity, - workspace: &Entity, - cx: &mut App, - ) -> Option { - match entry { - ContextPickerEntry::Mode(mode) => Some(Completion { - replace_range: source_range, - new_text: format!("@{} ", mode.keyword()), - label: CodeLabel::plain(mode.label().to_string(), None), - icon_path: Some(mode.icon().path().into()), - documentation: None, - source: project::CompletionSource::Custom, - insert_text_mode: None, - // This ensures that when a user accepts this completion, the - // completion menu will still be shown after "@category " is - // inserted - confirm: Some(Arc::new(|_, _, _| true)), - }), - ContextPickerEntry::Action(action) => { - let (new_text, on_action) = match action { - ContextPickerAction::AddSelections => { - let selections = selection_ranges(workspace, cx); - - let selection_infos = selections - .iter() - .map(|(buffer, range)| { - let full_path = buffer - .read(cx) - .file() - .map(|file| file.full_path(cx)) - .unwrap_or_else(|| PathBuf::from("untitled")); - let file_name = full_path - .file_name() - .unwrap_or_default() - .to_string_lossy() - .to_string(); - let line_range = range.to_point(&buffer.read(cx).snapshot()); - - let link = MentionLink::for_selection( - &file_name, - &full_path.to_string_lossy(), - line_range.start.row as usize..line_range.end.row as usize, - ); - (file_name, link, line_range) - }) - .collect::>(); - - let new_text = format!( - "{} ", - selection_infos.iter().map(|(_, link, _)| link).join(" ") - ); - - let callback = Arc::new({ - move |_, window: &mut Window, cx: &mut App| { - context_store.update(cx, |context_store, cx| { - for (buffer, range) in &selections { - context_store.add_selection( - buffer.clone(), - range.clone(), - cx, - ); - } - }); - - let editor = editor.clone(); - let selection_infos = selection_infos.clone(); - window.defer(cx, move |window, cx| { - let mut current_offset = 0; - for (file_name, link, line_range) in selection_infos.iter() { - let snapshot = - editor.read(cx).buffer().read(cx).snapshot(cx); - let Some(start) = snapshot - .anchor_in_excerpt(excerpt_id, source_range.start) - else { - return; - }; - - let offset = 
start.to_offset(&snapshot) + current_offset; - let text_len = link.len(); - - let range = snapshot.anchor_after(offset) - ..snapshot.anchor_after(offset + text_len); - - let crease = super::crease_for_mention( - format!( - "{} ({}-{})", - file_name, - line_range.start.row + 1, - line_range.end.row + 1 - ) - .into(), - IconName::Reader.path().into(), - range, - editor.downgrade(), - ); - - editor.update(cx, |editor, cx| { - editor.insert_creases(vec![crease.clone()], cx); - editor.fold_creases(vec![crease], false, window, cx); - }); - - current_offset += text_len + 1; - } - }); - - false - } - }); - - (new_text, callback) - } - }; - - Some(Completion { - replace_range: source_range.clone(), - new_text, - label: CodeLabel::plain(action.label().to_string(), None), - icon_path: Some(action.icon().path().into()), - documentation: None, - source: project::CompletionSource::Custom, - insert_text_mode: None, - // This ensures that when a user accepts this completion, the - // completion menu will still be shown after "@category " is - // inserted - confirm: Some(on_action), - }) - } - } - } - - fn completion_for_thread( - thread_entry: HistoryEntry, - excerpt_id: ExcerptId, - source_range: Range, - recent: bool, - editor: Entity, - context_store: Entity, - thread_store: Entity, - project: Entity, - ) -> Completion { - let icon_for_completion = if recent { - IconName::HistoryRerun - } else { - IconName::Thread - }; - let new_text = format!("{} ", MentionLink::for_thread(&thread_entry)); - let new_text_len = new_text.len(); - Completion { - replace_range: source_range.clone(), - new_text, - label: CodeLabel::plain(thread_entry.title().to_string(), None), - documentation: None, - insert_text_mode: None, - source: project::CompletionSource::Custom, - icon_path: Some(icon_for_completion.path().into()), - confirm: Some(confirm_completion_callback( - IconName::Thread.path().into(), - thread_entry.title().clone(), - excerpt_id, - source_range.start, - new_text_len - 1, - editor, - context_store.clone(), - move |window, cx| match &thread_entry { - HistoryEntry::AcpThread(thread) => { - let context_store = context_store.clone(); - let load_thread_task = agent::load_agent_thread( - thread.id.clone(), - thread_store.clone(), - project.clone(), - cx, - ); - window.spawn::<_, Option<_>>(cx, async move |cx| { - let thread = load_thread_task.await.log_err()?; - let context = context_store - .update(cx, |context_store, cx| { - context_store.add_thread(thread, false, cx) - }) - .ok()??; - Some(context) - }) - } - HistoryEntry::TextThread(thread) => { - let path = thread.path.clone(); - let context_store = context_store.clone(); - let thread_store = thread_store.clone(); - cx.spawn::<_, Option<_>>(async move |cx| { - let thread = thread_store - .update(cx, |store, cx| store.load_text_thread(path, cx)) - .ok()? 
- .await - .log_err()?; - let context = context_store - .update(cx, |context_store, cx| { - context_store.add_text_thread(thread, false, cx) - }) - .ok()??; - Some(context) - }) - } - }, - )), - } - } - - fn completion_for_rules( - rules: RulesContextEntry, - excerpt_id: ExcerptId, - source_range: Range, - editor: Entity, - context_store: Entity, - ) -> Completion { - let new_text = format!("{} ", MentionLink::for_rule(&rules)); - let new_text_len = new_text.len(); - Completion { - replace_range: source_range.clone(), - new_text, - label: CodeLabel::plain(rules.title.to_string(), None), - documentation: None, - insert_text_mode: None, - source: project::CompletionSource::Custom, - icon_path: Some(RULES_ICON.path().into()), - confirm: Some(confirm_completion_callback( - RULES_ICON.path().into(), - rules.title.clone(), - excerpt_id, - source_range.start, - new_text_len - 1, - editor, - context_store.clone(), - move |_, cx| { - let user_prompt_id = rules.prompt_id; - let context = context_store.update(cx, |context_store, cx| { - context_store.add_rules(user_prompt_id, false, cx) - }); - Task::ready(context) - }, - )), - } - } - - fn completion_for_fetch( - source_range: Range, - url_to_fetch: SharedString, - excerpt_id: ExcerptId, - editor: Entity, - context_store: Entity, - http_client: Arc, - ) -> Completion { - let new_text = format!("{} ", MentionLink::for_fetch(&url_to_fetch)); - let new_text_len = new_text.len(); - Completion { - replace_range: source_range.clone(), - new_text, - label: CodeLabel::plain(url_to_fetch.to_string(), None), - documentation: None, - source: project::CompletionSource::Custom, - icon_path: Some(IconName::ToolWeb.path().into()), - insert_text_mode: None, - confirm: Some(confirm_completion_callback( - IconName::ToolWeb.path().into(), - url_to_fetch.clone(), - excerpt_id, - source_range.start, - new_text_len - 1, - editor, - context_store.clone(), - move |_, cx| { - let context_store = context_store.clone(); - let http_client = http_client.clone(); - let url_to_fetch = url_to_fetch.clone(); - cx.spawn(async move |cx| { - if let Some(context) = context_store - .read_with(cx, |context_store, _| { - context_store.get_url_context(url_to_fetch.clone()) - }) - .ok()? 
- { - return Some(context); - } - let content = cx - .background_spawn(fetch_url_content( - http_client, - url_to_fetch.to_string(), - )) - .await - .log_err()?; - context_store - .update(cx, |context_store, cx| { - context_store.add_fetched_url(url_to_fetch.to_string(), content, cx) - }) - .ok() - }) - }, - )), - } - } - - fn completion_for_path( - project_path: ProjectPath, - path_prefix: &RelPath, - is_recent: bool, - is_directory: bool, - excerpt_id: ExcerptId, - source_range: Range, - path_style: PathStyle, - editor: Entity, - context_store: Entity, - cx: &App, - ) -> Completion { - let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory( - &project_path.path, - path_prefix, - path_style, - ); - - let label = - build_code_label_for_full_path(&file_name, directory.as_ref().map(|s| s.as_ref()), cx); - let full_path = if let Some(directory) = directory { - format!("{}{}", directory, file_name) - } else { - file_name.to_string() - }; - - let path = Path::new(&full_path); - let crease_icon_path = if is_directory { - FileIcons::get_folder_icon(false, path, cx) - .unwrap_or_else(|| IconName::Folder.path().into()) - } else { - FileIcons::get_icon(path, cx).unwrap_or_else(|| IconName::File.path().into()) - }; - let completion_icon_path = if is_recent { - IconName::HistoryRerun.path().into() - } else { - crease_icon_path.clone() - }; - - let new_text = format!("{} ", MentionLink::for_file(&file_name, &full_path)); - let new_text_len = new_text.len(); - Completion { - replace_range: source_range.clone(), - new_text, - label, - documentation: None, - source: project::CompletionSource::Custom, - icon_path: Some(completion_icon_path), - insert_text_mode: None, - confirm: Some(confirm_completion_callback( - crease_icon_path, - file_name, - excerpt_id, - source_range.start, - new_text_len - 1, - editor, - context_store.clone(), - move |_, cx| { - if is_directory { - Task::ready( - context_store - .update(cx, |context_store, cx| { - context_store.add_directory(&project_path, false, cx) - }) - .log_err() - .flatten(), - ) - } else { - let result = context_store.update(cx, |context_store, cx| { - context_store.add_file_from_path(project_path.clone(), false, cx) - }); - cx.spawn(async move |_| result.await.log_err().flatten()) - } - }, - )), - } - } - - fn completion_for_symbol( - symbol: Symbol, - excerpt_id: ExcerptId, - source_range: Range, - editor: Entity, - context_store: Entity, - workspace: Entity, - cx: &mut App, - ) -> Option { - let path_style = workspace.read(cx).path_style(cx); - let SymbolLocation::InProject(symbol_path) = &symbol.path else { - return None; - }; - let _path_prefix = workspace - .read(cx) - .project() - .read(cx) - .worktree_for_id(symbol_path.worktree_id, cx)?; - let path_prefix = RelPath::empty(); - - let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory( - &symbol_path.path, - path_prefix, - path_style, - ); - let full_path = if let Some(directory) = directory { - format!("{}{}", directory, file_name) - } else { - file_name.to_string() - }; - - let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabelBuilder::default(); - label.push_str(&symbol.name, None); - label.push_str(" ", None); - label.push_str(&file_name, comment_id); - label.push_str(&format!(" L{}", symbol.range.start.0.row + 1), comment_id); - - let new_text = format!("{} ", MentionLink::for_symbol(&symbol.name, &full_path)); - let new_text_len = new_text.len(); - Some(Completion { - 
replace_range: source_range.clone(), - new_text, - label: label.build(), - documentation: None, - source: project::CompletionSource::Custom, - icon_path: Some(IconName::Code.path().into()), - insert_text_mode: None, - confirm: Some(confirm_completion_callback( - IconName::Code.path().into(), - symbol.name.clone().into(), - excerpt_id, - source_range.start, - new_text_len - 1, - editor, - context_store.clone(), - move |_, cx| { - let symbol = symbol.clone(); - let context_store = context_store.clone(); - let workspace = workspace.clone(); - let result = super::symbol_context_picker::add_symbol( - symbol, - false, - workspace, - context_store.downgrade(), - cx, - ); - cx.spawn(async move |_| result.await.log_err()?.0) - }, - )), - }) - } -} - -fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { - let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabelBuilder::default(); - - label.push_str(file_name, None); - label.push_str(" ", None); - - if let Some(directory) = directory { - label.push_str(directory, comment_id); - } - - label.build() -} - -impl CompletionProvider for ContextPickerCompletionProvider { - fn completions( - &self, - excerpt_id: ExcerptId, - buffer: &Entity, - buffer_position: Anchor, - _trigger: CompletionContext, - _window: &mut Window, - cx: &mut Context, - ) -> Task>> { - let snapshot = buffer.read(cx).snapshot(); - let position = buffer_position.to_point(&snapshot); - let line_start = Point::new(position.row, 0); - let offset_to_line = snapshot.point_to_offset(line_start); - let mut lines = snapshot.text_for_range(line_start..position).lines(); - let Some(line) = lines.next() else { - return Task::ready(Ok(Vec::new())); - }; - let Some(state) = MentionCompletion::try_parse(line, offset_to_line) else { - return Task::ready(Ok(Vec::new())); - }; - - let Some((workspace, context_store)) = - self.workspace.upgrade().zip(self.context_store.upgrade()) - else { - return Task::ready(Ok(Vec::new())); - }; - - let source_range = snapshot.anchor_before(state.source_range.start) - ..snapshot.anchor_after(state.source_range.end); - - let thread_store = self.thread_store.clone(); - let prompt_store = self.prompt_store.clone(); - let editor = self.editor.clone(); - let http_client = workspace.read(cx).client().http_client(); - let path_style = workspace.read(cx).path_style(cx); - - let MentionCompletion { mode, argument, .. 
} = state; - let query = argument.unwrap_or_else(|| "".to_string()); - - let excluded_path = self - .excluded_buffer - .as_ref() - .and_then(WeakEntity::upgrade) - .and_then(|b| b.read(cx).file()) - .map(|file| ProjectPath::from_file(file.as_ref(), cx)); - - let recent_entries = recent_context_picker_entries_with_store( - context_store.clone(), - thread_store.clone(), - workspace.clone(), - excluded_path.clone(), - cx, - ); - - let search_task = search( - mode, - query, - Arc::::default(), - recent_entries, - prompt_store, - thread_store.clone(), - workspace.clone(), - cx, - ); - let project = workspace.read(cx).project().downgrade(); - - cx.spawn(async move |_, cx| { - let matches = search_task.await; - let Some((editor, project)) = editor.upgrade().zip(project.upgrade()) else { - return Ok(Vec::new()); - }; - - let completions = cx.update(|cx| { - matches - .into_iter() - .filter_map(|mat| match mat { - Match::File(FileMatch { mat, is_recent }) => { - let project_path = ProjectPath { - worktree_id: WorktreeId::from_usize(mat.worktree_id), - path: mat.path.clone(), - }; - - if excluded_path.as_ref() == Some(&project_path) { - return None; - } - - // If path is empty, this means we're matching with the root directory itself - // so we use the path_prefix as the name - let path_prefix = if mat.path.is_empty() { - project - .read(cx) - .worktree_for_id(project_path.worktree_id, cx) - .map(|wt| wt.read(cx).root_name().into()) - .unwrap_or_else(|| mat.path_prefix.clone()) - } else { - mat.path_prefix.clone() - }; - - Some(Self::completion_for_path( - project_path, - &path_prefix, - is_recent, - mat.is_dir, - excerpt_id, - source_range.clone(), - path_style, - editor.clone(), - context_store.clone(), - cx, - )) - } - - Match::Symbol(SymbolMatch { symbol, .. }) => Self::completion_for_symbol( - symbol, - excerpt_id, - source_range.clone(), - editor.clone(), - context_store.clone(), - workspace.clone(), - cx, - ), - Match::Thread(thread) => { - let thread_store = thread_store.as_ref().and_then(|t| t.upgrade())?; - Some(Self::completion_for_thread( - thread, - excerpt_id, - source_range.clone(), - false, - editor.clone(), - context_store.clone(), - thread_store, - project.clone(), - )) - } - Match::RecentThread(thread) => { - let thread_store = thread_store.as_ref().and_then(|t| t.upgrade())?; - Some(Self::completion_for_thread( - thread, - excerpt_id, - source_range.clone(), - true, - editor.clone(), - context_store.clone(), - thread_store, - project.clone(), - )) - } - Match::Rules(user_rules) => Some(Self::completion_for_rules( - user_rules, - excerpt_id, - source_range.clone(), - editor.clone(), - context_store.clone(), - )), - - Match::Fetch(url) => Some(Self::completion_for_fetch( - source_range.clone(), - url, - excerpt_id, - editor.clone(), - context_store.clone(), - http_client.clone(), - )), - - Match::Entry(EntryMatch { entry, .. }) => Self::completion_for_entry( - entry, - excerpt_id, - source_range.clone(), - editor.clone(), - context_store.clone(), - &workspace, - cx, - ), - }) - .collect() - })?; - - Ok(vec![CompletionResponse { - completions, - display_options: CompletionDisplayOptions::default(), - // Since this does its own filtering (see `filter_completions()` returns false), - // there is no benefit to computing whether this set of completions is incomplete. 
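- // Added note (assumption, mirroring LSP `isIncomplete` semantics): returning
- // `is_incomplete: true` asks the editor to re-query this provider as the user
- // keeps typing, so each keystroke re-runs the search above rather than
- // relying on client-side filtering.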
- is_incomplete: true, - }]) - }) - } - - fn is_completion_trigger( - &self, - buffer: &Entity, - position: language::Anchor, - _text: &str, - _trigger_in_words: bool, - _menu_is_open: bool, - cx: &mut Context, - ) -> bool { - let buffer = buffer.read(cx); - let position = position.to_point(buffer); - let line_start = Point::new(position.row, 0); - let offset_to_line = buffer.point_to_offset(line_start); - let mut lines = buffer.text_for_range(line_start..position).lines(); - if let Some(line) = lines.next() { - MentionCompletion::try_parse(line, offset_to_line) - .map(|completion| { - completion.source_range.start <= offset_to_line + position.column as usize - && completion.source_range.end >= offset_to_line + position.column as usize - }) - .unwrap_or(false) - } else { - false - } - } - - fn sort_completions(&self) -> bool { - false - } - - fn filter_completions(&self) -> bool { - false - } -} - -fn confirm_completion_callback( - crease_icon_path: SharedString, - crease_text: SharedString, - excerpt_id: ExcerptId, - start: Anchor, - content_len: usize, - editor: Entity, - context_store: Entity, - add_context_fn: impl Fn(&mut Window, &mut App) -> Task> - + Send - + Sync - + 'static, -) -> Arc bool + Send + Sync> { - Arc::new(move |_, window, cx| { - let context = add_context_fn(window, cx); - - let crease_text = crease_text.clone(); - let crease_icon_path = crease_icon_path.clone(); - let editor = editor.clone(); - let context_store = context_store.clone(); - window.defer(cx, move |window, cx| { - let crease_id = crate::context_picker::insert_crease_for_mention( - excerpt_id, - start, - content_len, - crease_text.clone(), - crease_icon_path, - editor.clone(), - window, - cx, - ); - cx.spawn(async move |cx| { - let crease_id = crease_id?; - let context = context.await?; - editor - .update(cx, |editor, cx| { - if let Some(addon) = editor.addon_mut::() { - addon.add_creases( - &context_store, - AgentContextKey(context), - [(crease_id, crease_text)], - cx, - ); - } - }) - .ok() - }) - .detach(); - }); - false - }) -} - -#[derive(Debug, Default, PartialEq)] -struct MentionCompletion { - source_range: Range, - mode: Option, - argument: Option, -} - -impl MentionCompletion { - fn try_parse(line: &str, offset_to_line: usize) -> Option { - let last_mention_start = line.rfind('@')?; - if last_mention_start >= line.len() { - return Some(Self::default()); - } - if last_mention_start > 0 - && line - .chars() - .nth(last_mention_start - 1) - .is_some_and(|c| !c.is_whitespace()) - { - return None; - } - - let rest_of_line = &line[last_mention_start + 1..]; - - let mut mode = None; - let mut argument = None; - - let mut parts = rest_of_line.split_whitespace(); - let mut end = last_mention_start + 1; - if let Some(mode_text) = parts.next() { - end += mode_text.len(); - - if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() { - mode = Some(parsed_mode); - } else { - argument = Some(mode_text.to_string()); - } - match rest_of_line[mode_text.len()..].find(|c: char| !c.is_whitespace()) { - Some(whitespace_count) => { - if let Some(argument_text) = parts.next() { - argument = Some(argument_text.to_string()); - end += whitespace_count + argument_text.len(); - } - } - None => { - // Rest of line is entirely whitespace - end += rest_of_line.len() - mode_text.len(); - } - } - } - - Some(Self { - source_range: last_mention_start + offset_to_line..end + offset_to_line, - mode, - argument, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use editor::AnchorRangeExt; - use 
gpui::{EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext}; - use project::{Project, ProjectPath}; - use serde_json::json; - use settings::SettingsStore; - use std::{ops::Deref, rc::Rc}; - use util::{path, rel_path::rel_path}; - use workspace::{AppState, Item}; - - #[test] - fn test_mention_completion_parse() { - assert_eq!(MentionCompletion::try_parse("Lorem Ipsum", 0), None); - - assert_eq!( - MentionCompletion::try_parse("Lorem @", 0), - Some(MentionCompletion { - source_range: 6..7, - mode: None, - argument: None, - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @file", 0), - Some(MentionCompletion { - source_range: 6..11, - mode: Some(ContextPickerMode::File), - argument: None, - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @file ", 0), - Some(MentionCompletion { - source_range: 6..12, - mode: Some(ContextPickerMode::File), - argument: None, - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs", 0), - Some(MentionCompletion { - source_range: 6..19, - mode: Some(ContextPickerMode::File), - argument: Some("main.rs".to_string()), - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs ", 0), - Some(MentionCompletion { - source_range: 6..19, - mode: Some(ContextPickerMode::File), - argument: Some("main.rs".to_string()), - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs Ipsum", 0), - Some(MentionCompletion { - source_range: 6..19, - mode: Some(ContextPickerMode::File), - argument: Some("main.rs".to_string()), - }) - ); - - assert_eq!( - MentionCompletion::try_parse("Lorem @main", 0), - Some(MentionCompletion { - source_range: 6..11, - mode: None, - argument: Some("main".to_string()), - }) - ); - - assert_eq!(MentionCompletion::try_parse("test@", 0), None); - } - - struct AtMentionEditor(Entity); - - impl Item for AtMentionEditor { - type Event = (); - - fn include_in_nav_history() -> bool { - false - } - - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Test".into() - } - } - - impl EventEmitter<()> for AtMentionEditor {} - - impl Focusable for AtMentionEditor { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.0.read(cx).focus_handle(cx) - } - } - - impl Render for AtMentionEditor { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.0.clone().into_any_element() - } - } - - #[gpui::test] - async fn test_context_completion_provider(cx: &mut TestAppContext) { - init_test(cx); - - let app_state = cx.update(AppState::test); - - cx.update(|cx| { - editor::init(cx); - workspace::init(app_state.clone(), cx); - }); - - app_state - .fs - .as_fake() - .insert_tree( - path!("/dir"), - json!({ - "editor": "", - "a": { - "one.txt": "", - "two.txt": "", - "three.txt": "", - "four.txt": "" - }, - "b": { - "five.txt": "", - "six.txt": "", - "seven.txt": "", - "eight.txt": "", - } - }), - ) - .await; - - let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; - let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let workspace = window.root(cx).unwrap(); - - let worktree = project.update(cx, |project, cx| { - let mut worktrees = project.worktrees(cx).collect::>(); - assert_eq!(worktrees.len(), 1); - worktrees.pop().unwrap() - }); - let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); - - let mut cx = VisualTestContext::from_window(*window.deref(), cx); - - let paths = vec![ - rel_path("a/one.txt"), - 
rel_path("a/two.txt"), - rel_path("a/three.txt"), - rel_path("a/four.txt"), - rel_path("b/five.txt"), - rel_path("b/six.txt"), - rel_path("b/seven.txt"), - rel_path("b/eight.txt"), - ]; - - let slash = PathStyle::local().separator(); - - let mut opened_editors = Vec::new(); - for path in paths { - let buffer = workspace - .update_in(&mut cx, |workspace, window, cx| { - workspace.open_path( - ProjectPath { - worktree_id, - path: path.into(), - }, - None, - false, - window, - cx, - ) - }) - .await - .unwrap(); - opened_editors.push(buffer); - } - - let editor = workspace.update_in(&mut cx, |workspace, window, cx| { - let editor = cx.new(|cx| { - Editor::new( - editor::EditorMode::full(), - multi_buffer::MultiBuffer::build_simple("", cx), - None, - window, - cx, - ) - }); - workspace.active_pane().update(cx, |pane, cx| { - pane.add_item( - Box::new(cx.new(|_| AtMentionEditor(editor.clone()))), - true, - true, - None, - window, - cx, - ); - }); - editor - }); - - let context_store = cx.new(|_| ContextStore::new(project.downgrade())); - - let editor_entity = editor.downgrade(); - editor.update_in(&mut cx, |editor, window, cx| { - let last_opened_buffer = opened_editors.last().and_then(|editor| { - editor - .downcast::()? - .read(cx) - .buffer() - .read(cx) - .as_singleton() - .as_ref() - .map(Entity::downgrade) - }); - window.focus(&editor.focus_handle(cx)); - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - workspace.downgrade(), - context_store.downgrade(), - None, - None, - editor_entity, - last_opened_buffer, - )))); - }); - - cx.simulate_input("Lorem "); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem "); - assert!(!editor.has_visible_completions_menu()); - }); - - cx.simulate_input("@"); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem @"); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - current_completion_labels(editor), - &[ - format!("seven.txt b{slash}"), - format!("six.txt b{slash}"), - format!("five.txt b{slash}"), - format!("four.txt a{slash}"), - "Files & Directories".into(), - "Symbols".into(), - "Fetch".into() - ] - ); - }); - - // Select and confirm "File" - editor.update_in(&mut cx, |editor, window, cx| { - assert!(editor.has_visible_completions_menu()); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - cx.run_until_parked(); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem @file "); - assert!(editor.has_visible_completions_menu()); - }); - - cx.simulate_input("one"); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "Lorem @file one"); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - current_completion_labels(editor), - vec![format!("one.txt a{slash}")] - ); - }); - - editor.update_in(&mut cx, |editor, window, cx| { - assert!(editor.has_visible_completions_menu()); - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) ") - ); - assert!(!editor.has_visible_completions_menu()); - assert_eq!( 
- fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 33)] - ); - }); - - cx.simulate_input(" "); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) ") - ); - assert!(!editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 33)] - ); - }); - - cx.simulate_input("Ipsum "); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum "), - ); - assert!(!editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 33)] - ); - }); - - cx.simulate_input("@file "); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum @file "), - ); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![Point::new(0, 6)..Point::new(0, 33)] - ); - }); - - editor.update_in(&mut cx, |editor, window, cx| { - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - cx.run_until_parked(); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) ") - ); - assert!(!editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![ - Point::new(0, 6)..Point::new(0, 33), - Point::new(0, 41)..Point::new(0, 72) - ] - ); - }); - - cx.simulate_input("\n@"); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n@") - ); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![ - Point::new(0, 6)..Point::new(0, 33), - Point::new(0, 41)..Point::new(0, 72) - ] - ); - }); - - editor.update_in(&mut cx, |editor, window, cx| { - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - cx.run_until_parked(); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n[@six.txt](@file:b{slash}six.txt) ") - ); - assert!(!editor.has_visible_completions_menu()); - assert_eq!( - fold_ranges(editor, cx), - vec![ - Point::new(0, 6)..Point::new(0, 33), - Point::new(0, 41)..Point::new(0, 72), - Point::new(1, 0)..Point::new(1, 27) - ] - ); - }); - } - - #[gpui::test] - async fn test_context_completion_provider_multiple_worktrees(cx: &mut TestAppContext) { - init_test(cx); - - let app_state = cx.update(AppState::test); - - cx.update(|cx| { - editor::init(cx); - workspace::init(app_state.clone(), cx); - }); - - app_state - .fs - .as_fake() - .insert_tree( - path!("/project1"), - json!({ - "a": { - "one.txt": "", - "two.txt": "", - } - }), - ) - .await; - - app_state - .fs - .as_fake() - .insert_tree( - path!("/project2"), - json!({ - "b": { - "three.txt": "", - "four.txt": "", - } - }), - ) - .await; - - let project = Project::test( - app_state.fs.clone(), - [path!("/project1").as_ref(), path!("/project2").as_ref()], - cx, - ) - .await; - let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let workspace = window.root(cx).unwrap(); - - let worktrees = project.update(cx, |project, cx| { - let worktrees = 
project.worktrees(cx).collect::>(); - assert_eq!(worktrees.len(), 2); - worktrees - }); - - let mut cx = VisualTestContext::from_window(*window.deref(), cx); - let slash = PathStyle::local().separator(); - - for (worktree_idx, paths) in [ - vec![rel_path("a/one.txt"), rel_path("a/two.txt")], - vec![rel_path("b/three.txt"), rel_path("b/four.txt")], - ] - .iter() - .enumerate() - { - let worktree_id = worktrees[worktree_idx].read_with(&cx, |wt, _| wt.id()); - for path in paths { - workspace - .update_in(&mut cx, |workspace, window, cx| { - workspace.open_path( - ProjectPath { - worktree_id, - path: (*path).into(), - }, - None, - false, - window, - cx, - ) - }) - .await - .unwrap(); - } - } - - let editor = workspace.update_in(&mut cx, |workspace, window, cx| { - let editor = cx.new(|cx| { - Editor::new( - editor::EditorMode::full(), - multi_buffer::MultiBuffer::build_simple("", cx), - None, - window, - cx, - ) - }); - workspace.active_pane().update(cx, |pane, cx| { - pane.add_item( - Box::new(cx.new(|_| AtMentionEditor(editor.clone()))), - true, - true, - None, - window, - cx, - ); - }); - editor - }); - - let context_store = cx.new(|_| ContextStore::new(project.downgrade())); - - let editor_entity = editor.downgrade(); - editor.update_in(&mut cx, |editor, window, cx| { - window.focus(&editor.focus_handle(cx)); - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - workspace.downgrade(), - context_store.downgrade(), - None, - None, - editor_entity, - None, - )))); - }); - - cx.simulate_input("@"); - - // With multiple worktrees, we should see the project name as prefix - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "@"); - assert!(editor.has_visible_completions_menu()); - let labels = current_completion_labels(editor); - - assert!( - labels.contains(&format!("four.txt project2{slash}b{slash}")), - "Expected 'four.txt project2{slash}b{slash}' in labels: {:?}", - labels - ); - assert!( - labels.contains(&format!("three.txt project2{slash}b{slash}")), - "Expected 'three.txt project2{slash}b{slash}' in labels: {:?}", - labels - ); - }); - - editor.update_in(&mut cx, |editor, window, cx| { - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx); - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - cx.run_until_parked(); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "@file "); - assert!(editor.has_visible_completions_menu()); - }); - - cx.simulate_input("one"); - - editor.update(&mut cx, |editor, cx| { - assert_eq!(editor.text(cx), "@file one"); - assert!(editor.has_visible_completions_menu()); - assert_eq!( - current_completion_labels(editor), - vec![format!("one.txt project1{slash}a{slash}")] - ); - }); - - editor.update_in(&mut cx, |editor, window, cx| { - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - - editor.update(&mut cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("[@one.txt](@file:project1{slash}a{slash}one.txt) ") - ); - assert!(!editor.has_visible_completions_menu()); - }); - } - - fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec> { - let snapshot = editor.buffer().read(cx).snapshot(cx); - editor.display_map.update(cx, |display_map, cx| { - display_map - 
.snapshot(cx) - .folds_in_range(0..snapshot.len()) - .map(|fold| fold.range.to_point(&snapshot)) - .collect() - }) - } - - fn current_completion_labels(editor: &Editor) -> Vec { - let completions = editor.current_completions().expect("Missing completions"); - completions - .into_iter() - .map(|completion| completion.label.text) - .collect::>() - } - - pub(crate) fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let store = SettingsStore::test(cx); - cx.set_global(store); - theme::init(theme::LoadThemes::JustBase, cx); - }); - } -} diff --git a/crates/agent_ui/src/context_picker/fetch_context_picker.rs b/crates/agent_ui/src/context_picker/fetch_context_picker.rs deleted file mode 100644 index 31fc45aca3ccbf561793769939169d214aaa2d99..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/fetch_context_picker.rs +++ /dev/null @@ -1,252 +0,0 @@ -use std::cell::RefCell; -use std::rc::Rc; -use std::sync::Arc; - -use anyhow::{Context as _, Result, bail}; -use futures::AsyncReadExt as _; -use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity}; -use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown}; -use http_client::{AsyncBody, HttpClientWithUrl}; -use picker::{Picker, PickerDelegate}; -use ui::{Context, ListItem, Window, prelude::*}; -use workspace::Workspace; - -use crate::{context_picker::ContextPicker, context_store::ContextStore}; - -pub struct FetchContextPicker { - picker: Entity>, -} - -impl FetchContextPicker { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = FetchContextPickerDelegate::new(context_picker, workspace, context_store); - let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); - - Self { picker } - } -} - -impl Focusable for FetchContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) - } -} - -impl Render for FetchContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.picker.clone() - } -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -enum ContentType { - Html, - Plaintext, - Json, -} - -pub struct FetchContextPickerDelegate { - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - url: String, -} - -impl FetchContextPickerDelegate { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - ) -> Self { - FetchContextPickerDelegate { - context_picker, - workspace, - context_store, - url: String::new(), - } - } -} - -pub(crate) async fn fetch_url_content( - http_client: Arc, - url: String, -) -> Result { - let url = if !url.starts_with("https://") && !url.starts_with("http://") { - format!("https://{url}") - } else { - url - }; - - let mut response = http_client.get(&url, AsyncBody::default(), true).await?; - - let mut body = Vec::new(); - response - .body_mut() - .read_to_end(&mut body) - .await - .context("error reading response body")?; - - if response.status().is_client_error() { - let text = String::from_utf8_lossy(body.as_slice()); - bail!( - "status error {}, response: {text:?}", - response.status().as_u16() - ); - } - - let Some(content_type) = response.headers().get("content-type") else { - bail!("missing Content-Type header"); - }; - let content_type = content_type - .to_str() - .context("invalid Content-Type header")?; - let content_type = match content_type { 
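- // Note: unrecognized content types fall through to the HTML branch below,
- // which strips page chrome and converts the response body to Markdown.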
- "text/html" => ContentType::Html, - "text/plain" => ContentType::Plaintext, - "application/json" => ContentType::Json, - _ => ContentType::Html, - }; - - match content_type { - ContentType::Html => { - let mut handlers: Vec = vec![ - Rc::new(RefCell::new(markdown::WebpageChromeRemover)), - Rc::new(RefCell::new(markdown::ParagraphHandler)), - Rc::new(RefCell::new(markdown::HeadingHandler)), - Rc::new(RefCell::new(markdown::ListHandler)), - Rc::new(RefCell::new(markdown::TableHandler::new())), - Rc::new(RefCell::new(markdown::StyledTextHandler)), - ]; - if url.contains("wikipedia.org") { - use html_to_markdown::structure::wikipedia; - - handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover))); - handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler))); - handlers.push(Rc::new( - RefCell::new(wikipedia::WikipediaCodeHandler::new()), - )); - } else { - handlers.push(Rc::new(RefCell::new(markdown::CodeHandler))); - } - - convert_html_to_markdown(&body[..], &mut handlers) - } - ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()), - ContentType::Json => { - let json: serde_json::Value = serde_json::from_slice(&body)?; - - Ok(format!( - "```json\n{}\n```", - serde_json::to_string_pretty(&json)? - )) - } - } -} - -impl PickerDelegate for FetchContextPickerDelegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - if self.url.is_empty() { 0 } else { 1 } - } - - fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { - Some("Enter the URL that you would like to fetch".into()) - } - - fn selected_index(&self) -> usize { - 0 - } - - fn set_selected_index( - &mut self, - _ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Enter a URL…".into() - } - - fn update_matches( - &mut self, - query: String, - _window: &mut Window, - _cx: &mut Context>, - ) -> Task<()> { - self.url = query; - - Task::ready(()) - } - - fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - let Some(workspace) = self.workspace.upgrade() else { - return; - }; - - let http_client = workspace.read(cx).client().http_client(); - let url = self.url.clone(); - cx.spawn_in(window, async move |this, cx| { - let text = cx - .background_spawn(fetch_url_content(http_client, url.clone())) - .await?; - - this.update(cx, |this, cx| { - this.delegate.context_store.update(cx, |context_store, cx| { - context_store.add_fetched_url(url, text, cx) - }) - })??; - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - - fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { - self.context_picker - .update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let added = self - .context_store - .upgrade() - .is_some_and(|context_store| context_store.read(cx).includes_url(&self.url)); - - Some( - ListItem::new(ix) - .inset(true) - .toggle_state(selected) - .child(Label::new(self.url.clone())) - .when(added, |child| { - child.disabled(true).end_slot( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Added").size(LabelSize::Small)), - ) - }), - ) - } -} diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs deleted file mode 100644 index 
ded24caa922d27d8821e46e5c58b5ed22ab754ff..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/file_context_picker.rs +++ /dev/null @@ -1,392 +0,0 @@ -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use file_icons::FileIcons; -use fuzzy::PathMatch; -use gpui::{ - App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity, -}; -use picker::{Picker, PickerDelegate}; -use project::{PathMatchCandidateSet, ProjectPath, WorktreeId}; -use ui::{ListItem, Tooltip, prelude::*}; -use util::{ResultExt as _, paths::PathStyle, rel_path::RelPath}; -use workspace::Workspace; - -use crate::{ - context_picker::ContextPicker, - context_store::{ContextStore, FileInclusion}, -}; - -pub struct FileContextPicker { - picker: Entity>, -} - -impl FileContextPicker { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = FileContextPickerDelegate::new(context_picker, workspace, context_store); - let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); - - Self { picker } - } -} - -impl Focusable for FileContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) - } -} - -impl Render for FileContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.picker.clone() - } -} - -pub struct FileContextPickerDelegate { - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - matches: Vec, - selected_index: usize, -} - -impl FileContextPickerDelegate { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - ) -> Self { - Self { - context_picker, - workspace, - context_store, - matches: Vec::new(), - selected_index: 0, - } - } -} - -impl PickerDelegate for FileContextPickerDelegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - self.matches.len() - } - - fn selected_index(&self) -> usize { - self.selected_index - } - - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search files & directories…".into() - } - - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let Some(workspace) = self.workspace.upgrade() else { - return Task::ready(()); - }; - - let search_task = search_files(query, Arc::::default(), &workspace, cx); - - cx.spawn_in(window, async move |this, cx| { - // TODO: This should be probably be run in the background. - let paths = search_task.await; - - this.update(cx, |this, _cx| { - this.delegate.matches = paths; - }) - .log_err(); - }) - } - - fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context>) { - let Some(FileMatch { mat, .. 
}) = self.matches.get(self.selected_index) else { - return; - }; - - let project_path = ProjectPath { - worktree_id: WorktreeId::from_usize(mat.worktree_id), - path: mat.path.clone(), - }; - - let is_directory = mat.is_dir; - - self.context_store - .update(cx, |context_store, cx| { - if is_directory { - context_store - .add_directory(&project_path, true, cx) - .log_err(); - } else { - context_store - .add_file_from_path(project_path.clone(), true, cx) - .detach_and_log_err(cx); - } - }) - .ok(); - } - - fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { - self.context_picker - .update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let FileMatch { mat, .. } = &self.matches.get(ix)?; - let workspace = self.workspace.upgrade()?; - let path_style = workspace.read(cx).path_style(cx); - - Some( - ListItem::new(ix) - .inset(true) - .toggle_state(selected) - .child(render_file_context_entry( - ElementId::named_usize("file-ctx-picker", ix), - WorktreeId::from_usize(mat.worktree_id), - &mat.path, - &mat.path_prefix, - mat.is_dir, - path_style, - self.context_store.clone(), - cx, - )), - ) - } -} - -pub struct FileMatch { - pub mat: PathMatch, - pub is_recent: bool, -} - -pub(crate) fn search_files( - query: String, - cancellation_flag: Arc, - workspace: &Entity, - cx: &App, -) -> Task> { - if query.is_empty() { - let workspace = workspace.read(cx); - let project = workspace.project().read(cx); - let visible_worktrees = workspace.visible_worktrees(cx).collect::>(); - let include_root_name = visible_worktrees.len() > 1; - - let recent_matches = workspace - .recent_navigation_history(Some(10), cx) - .into_iter() - .map(|(project_path, _)| { - let path_prefix = if include_root_name { - project - .worktree_for_id(project_path.worktree_id, cx) - .map(|wt| wt.read(cx).root_name().into()) - .unwrap_or_else(|| RelPath::empty().into()) - } else { - RelPath::empty().into() - }; - - FileMatch { - mat: PathMatch { - score: 0., - positions: Vec::new(), - worktree_id: project_path.worktree_id.to_usize(), - path: project_path.path, - path_prefix, - distance_to_relative_ancestor: 0, - is_dir: false, - }, - is_recent: true, - } - }); - - let file_matches = visible_worktrees.into_iter().flat_map(|worktree| { - let worktree = worktree.read(cx); - let path_prefix: Arc = if include_root_name { - worktree.root_name().into() - } else { - RelPath::empty().into() - }; - worktree.entries(false, 0).map(move |entry| FileMatch { - mat: PathMatch { - score: 0., - positions: Vec::new(), - worktree_id: worktree.id().to_usize(), - path: entry.path.clone(), - path_prefix: path_prefix.clone(), - distance_to_relative_ancestor: 0, - is_dir: entry.is_dir(), - }, - is_recent: false, - }) - }); - - Task::ready(recent_matches.chain(file_matches).collect()) - } else { - let worktrees = workspace.read(cx).visible_worktrees(cx).collect::>(); - let include_root_name = worktrees.len() > 1; - let candidate_sets = worktrees - .into_iter() - .map(|worktree| { - let worktree = worktree.read(cx); - - PathMatchCandidateSet { - snapshot: worktree.snapshot(), - include_ignored: worktree.root_entry().is_some_and(|entry| entry.is_ignored), - include_root_name, - candidates: project::Candidates::Entries, - } - }) - .collect::>(); - - let executor = cx.background_executor().clone(); - cx.foreground_executor().spawn(async move { - fuzzy::match_path_sets( - candidate_sets.as_slice(), - query.as_str(), - &None, - 
false, - 100, - &cancellation_flag, - executor, - ) - .await - .into_iter() - .map(|mat| FileMatch { - mat, - is_recent: false, - }) - .collect::>() - }) - } -} - -pub fn extract_file_name_and_directory( - path: &RelPath, - path_prefix: &RelPath, - path_style: PathStyle, -) -> (SharedString, Option) { - // If path is empty, this means we're matching with the root directory itself - // so we use the path_prefix as the name - if path.is_empty() && !path_prefix.is_empty() { - return (path_prefix.display(path_style).to_string().into(), None); - } - - let full_path = path_prefix.join(path); - let file_name = full_path.file_name().unwrap_or_default(); - let display_path = full_path.display(path_style); - let (directory, file_name) = display_path.split_at(display_path.len() - file_name.len()); - ( - file_name.to_string().into(), - Some(SharedString::new(directory)).filter(|dir| !dir.is_empty()), - ) -} - -pub fn render_file_context_entry( - id: ElementId, - worktree_id: WorktreeId, - path: &Arc, - path_prefix: &Arc, - is_directory: bool, - path_style: PathStyle, - context_store: WeakEntity, - cx: &App, -) -> Stateful
{ - let (file_name, directory) = extract_file_name_and_directory(path, path_prefix, path_style); - - let added = context_store.upgrade().and_then(|context_store| { - let project_path = ProjectPath { - worktree_id, - path: path.clone(), - }; - if is_directory { - context_store - .read(cx) - .path_included_in_directory(&project_path, cx) - } else { - context_store.read(cx).file_path_included(&project_path, cx) - } - }); - - let file_icon = if is_directory { - FileIcons::get_folder_icon(false, path.as_std_path(), cx) - } else { - FileIcons::get_icon(path.as_std_path(), cx) - } - .map(Icon::from_path) - .unwrap_or_else(|| Icon::new(IconName::File)); - - h_flex() - .id(id) - .gap_1p5() - .w_full() - .child(file_icon.size(IconSize::Small).color(Color::Muted)) - .child( - h_flex() - .gap_1() - .child(Label::new(file_name)) - .children(directory.map(|directory| { - Label::new(directory) - .size(LabelSize::Small) - .color(Color::Muted) - })), - ) - .when_some(added, |el, added| match added { - FileInclusion::Direct => el.child( - h_flex() - .w_full() - .justify_end() - .gap_0p5() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Added").size(LabelSize::Small)), - ), - FileInclusion::InDirectory { full_path } => { - let directory_full_path = full_path.to_string_lossy().into_owned(); - - el.child( - h_flex() - .w_full() - .justify_end() - .gap_0p5() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Included").size(LabelSize::Small)), - ) - .tooltip(Tooltip::text(format!("in {directory_full_path}"))) - } - }) -} diff --git a/crates/agent_ui/src/context_picker/rules_context_picker.rs b/crates/agent_ui/src/context_picker/rules_context_picker.rs deleted file mode 100644 index 68f4917a4fd5689aab1a418dd78d2c8a322cd717..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/rules_context_picker.rs +++ /dev/null @@ -1,224 +0,0 @@ -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity}; -use picker::{Picker, PickerDelegate}; -use prompt_store::{PromptId, PromptStore, UserPromptId}; -use ui::{ListItem, prelude::*}; -use util::ResultExt as _; - -use crate::{ - context::RULES_ICON, - context_picker::ContextPicker, - context_store::{self, ContextStore}, -}; - -pub struct RulesContextPicker { - picker: Entity>, -} - -impl RulesContextPicker { - pub fn new( - prompt_store: WeakEntity, - context_picker: WeakEntity, - context_store: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = RulesContextPickerDelegate::new(prompt_store, context_picker, context_store); - let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); - - RulesContextPicker { picker } - } -} - -impl Focusable for RulesContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) - } -} - -impl Render for RulesContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.picker.clone() - } -} - -#[derive(Debug, Clone)] -pub struct RulesContextEntry { - pub prompt_id: UserPromptId, - pub title: SharedString, -} - -pub struct RulesContextPickerDelegate { - prompt_store: WeakEntity, - context_picker: WeakEntity, - context_store: WeakEntity, - matches: Vec, - selected_index: usize, -} - -impl RulesContextPickerDelegate { - pub fn new( - prompt_store: WeakEntity, - context_picker: 
WeakEntity, - context_store: WeakEntity, - ) -> Self { - RulesContextPickerDelegate { - prompt_store, - context_picker, - context_store, - matches: Vec::new(), - selected_index: 0, - } - } -} - -impl PickerDelegate for RulesContextPickerDelegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - self.matches.len() - } - - fn selected_index(&self) -> usize { - self.selected_index - } - - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search available rules…".into() - } - - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let Some(prompt_store) = self.prompt_store.upgrade() else { - return Task::ready(()); - }; - let search_task = search_rules(query, Arc::new(AtomicBool::default()), &prompt_store, cx); - cx.spawn_in(window, async move |this, cx| { - let matches = search_task.await; - this.update(cx, |this, cx| { - this.delegate.matches = matches; - this.delegate.selected_index = 0; - cx.notify(); - }) - .ok(); - }) - } - - fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context>) { - let Some(entry) = self.matches.get(self.selected_index) else { - return; - }; - - self.context_store - .update(cx, |context_store, cx| { - context_store.add_rules(entry.prompt_id, true, cx) - }) - .log_err(); - } - - fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { - self.context_picker - .update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let thread = &self.matches.get(ix)?; - - Some(ListItem::new(ix).inset(true).toggle_state(selected).child( - render_thread_context_entry(thread, self.context_store.clone(), cx), - )) - } -} - -pub fn render_thread_context_entry( - user_rules: &RulesContextEntry, - context_store: WeakEntity, - cx: &mut App, -) -> Div { - let added = context_store.upgrade().is_some_and(|context_store| { - context_store - .read(cx) - .includes_user_rules(user_rules.prompt_id) - }); - - h_flex() - .gap_1p5() - .w_full() - .justify_between() - .child( - h_flex() - .gap_1p5() - .max_w_72() - .child( - Icon::new(RULES_ICON) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child(Label::new(user_rules.title.clone()).truncate()), - ) - .when(added, |el| { - el.child( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Added").size(LabelSize::Small)), - ) - }) -} - -pub(crate) fn search_rules( - query: String, - cancellation_flag: Arc, - prompt_store: &Entity, - cx: &mut App, -) -> Task> { - let search_task = prompt_store.read(cx).search(query, cancellation_flag, cx); - cx.background_spawn(async move { - search_task - .await - .into_iter() - .flat_map(|metadata| { - // Default prompts are filtered out as they are automatically included. 
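- // Only user-created prompts become selectable rules entries; the built-in
- // edit-workflow prompt is skipped along with default prompts.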
- if metadata.default { - None - } else { - match metadata.id { - PromptId::EditWorkflow => None, - PromptId::User { uuid } => Some(RulesContextEntry { - prompt_id: uuid, - title: metadata.title?, - }), - } - } - }) - .collect::>() - }) -} diff --git a/crates/agent_ui/src/context_picker/symbol_context_picker.rs b/crates/agent_ui/src/context_picker/symbol_context_picker.rs deleted file mode 100644 index fbce71d94efd84b1acc6e0b5d4ea11cb2b9243d5..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/symbol_context_picker.rs +++ /dev/null @@ -1,415 +0,0 @@ -use std::cmp::Reverse; -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use anyhow::{Result, anyhow}; -use fuzzy::{StringMatch, StringMatchCandidate}; -use gpui::{ - App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity, -}; -use ordered_float::OrderedFloat; -use picker::{Picker, PickerDelegate}; -use project::lsp_store::SymbolLocation; -use project::{DocumentSymbol, Symbol}; -use ui::{ListItem, prelude::*}; -use util::ResultExt as _; -use workspace::Workspace; - -use crate::{ - context::AgentContextHandle, context_picker::ContextPicker, context_store::ContextStore, -}; - -pub struct SymbolContextPicker { - picker: Entity>, -} - -impl SymbolContextPicker { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = SymbolContextPickerDelegate::new(context_picker, workspace, context_store); - let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); - - Self { picker } - } -} - -impl Focusable for SymbolContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) - } -} - -impl Render for SymbolContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.picker.clone() - } -} - -pub struct SymbolContextPickerDelegate { - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - matches: Vec, - selected_index: usize, -} - -impl SymbolContextPickerDelegate { - pub fn new( - context_picker: WeakEntity, - workspace: WeakEntity, - context_store: WeakEntity, - ) -> Self { - Self { - context_picker, - workspace, - context_store, - matches: Vec::new(), - selected_index: 0, - } - } -} - -impl PickerDelegate for SymbolContextPickerDelegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - self.matches.len() - } - - fn selected_index(&self) -> usize { - self.selected_index - } - - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search symbols…".into() - } - - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let Some(workspace) = self.workspace.upgrade() else { - return Task::ready(()); - }; - - let search_task = search_symbols(query, Arc::::default(), &workspace, cx); - let context_store = self.context_store.clone(); - cx.spawn_in(window, async move |this, cx| { - let symbols = search_task.await; - - let symbol_entries = context_store - .read_with(cx, |context_store, cx| { - compute_symbol_entries(symbols, context_store, cx) - }) - .log_err() - .unwrap_or_default(); - - this.update(cx, |this, _cx| { - this.delegate.matches = symbol_entries; - }) - .log_err(); - }) - } - - fn confirm(&mut self, 
_secondary: bool, _window: &mut Window, cx: &mut Context>) { - let Some(mat) = self.matches.get(self.selected_index) else { - return; - }; - let Some(workspace) = self.workspace.upgrade() else { - return; - }; - - let add_symbol_task = add_symbol( - mat.symbol.clone(), - true, - workspace, - self.context_store.clone(), - cx, - ); - - let selected_index = self.selected_index; - cx.spawn(async move |this, cx| { - let (_, included) = add_symbol_task.await?; - this.update(cx, |this, _| { - if let Some(mat) = this.delegate.matches.get_mut(selected_index) { - mat.is_included = included; - } - }) - }) - .detach_and_log_err(cx); - } - - fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { - self.context_picker - .update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - _: &mut Context>, - ) -> Option { - let mat = &self.matches.get(ix)?; - - Some(ListItem::new(ix).inset(true).toggle_state(selected).child( - render_symbol_context_entry(ElementId::named_usize("symbol-ctx-picker", ix), mat), - )) - } -} - -pub(crate) struct SymbolEntry { - pub symbol: Symbol, - pub is_included: bool, -} - -pub(crate) fn add_symbol( - symbol: Symbol, - remove_if_exists: bool, - workspace: Entity, - context_store: WeakEntity, - cx: &mut App, -) -> Task, bool)>> { - let project = workspace.read(cx).project().clone(); - let open_buffer_task = project.update(cx, |project, cx| { - let SymbolLocation::InProject(symbol_path) = &symbol.path else { - return Task::ready(Err(anyhow!("can't add symbol from outside of project"))); - }; - project.open_buffer(symbol_path.clone(), cx) - }); - cx.spawn(async move |cx| { - let buffer = open_buffer_task.await?; - let document_symbols = project - .update(cx, |project, cx| project.document_symbols(&buffer, cx))? - .await?; - - // Try to find a matching document symbol. Document symbols include - // not only the symbol itself (e.g. function name), but they also - // include the context that they contain (e.g. function body). - let (name, range, enclosing_range) = if let Some(DocumentSymbol { - name, - range, - selection_range, - .. 
- }) = - find_matching_symbol(&symbol, document_symbols.as_slice()) - { - (name, selection_range, range) - } else { - // If we do not find a matching document symbol, fall back to - // just the symbol itself - (symbol.name, symbol.range.clone(), symbol.range) - }; - - let (range, enclosing_range) = buffer.read_with(cx, |buffer, _| { - ( - buffer.anchor_after(range.start)..buffer.anchor_before(range.end), - buffer.anchor_after(enclosing_range.start) - ..buffer.anchor_before(enclosing_range.end), - ) - })?; - - context_store.update(cx, move |context_store, cx| { - context_store.add_symbol( - buffer, - name.into(), - range, - enclosing_range, - remove_if_exists, - cx, - ) - }) - }) -} - -fn find_matching_symbol(symbol: &Symbol, candidates: &[DocumentSymbol]) -> Option { - let mut candidates = candidates.iter(); - let mut candidate = candidates.next()?; - - loop { - if candidate.range.start > symbol.range.end { - return None; - } - if candidate.range.end < symbol.range.start { - candidate = candidates.next()?; - continue; - } - if candidate.selection_range == symbol.range { - return Some(candidate.clone()); - } - if candidate.range.start <= symbol.range.start && symbol.range.end <= candidate.range.end { - candidates = candidate.children.iter(); - candidate = candidates.next()?; - continue; - } - return None; - } -} - -pub struct SymbolMatch { - pub symbol: Symbol, -} - -pub(crate) fn search_symbols( - query: String, - cancellation_flag: Arc, - workspace: &Entity, - cx: &mut App, -) -> Task> { - let symbols_task = workspace.update(cx, |workspace, cx| { - workspace - .project() - .update(cx, |project, cx| project.symbols(&query, cx)) - }); - let project = workspace.read(cx).project().clone(); - cx.spawn(async move |cx| { - let Some(symbols) = symbols_task.await.log_err() else { - return Vec::new(); - }; - let Some((visible_match_candidates, external_match_candidates)): Option<(Vec<_>, Vec<_>)> = - project - .update(cx, |project, cx| { - symbols - .iter() - .enumerate() - .map(|(id, symbol)| { - StringMatchCandidate::new(id, symbol.label.filter_text()) - }) - .partition(|candidate| match &symbols[candidate.id].path { - SymbolLocation::InProject(project_path) => project - .entry_for_path(project_path, cx) - .is_some_and(|e| !e.is_ignored), - SymbolLocation::OutsideProject { .. 
} => false, - }) - }) - .log_err() - else { - return Vec::new(); - }; - - const MAX_MATCHES: usize = 100; - let mut visible_matches = cx.background_executor().block(fuzzy::match_strings( - &visible_match_candidates, - &query, - false, - true, - MAX_MATCHES, - &cancellation_flag, - cx.background_executor().clone(), - )); - let mut external_matches = cx.background_executor().block(fuzzy::match_strings( - &external_match_candidates, - &query, - false, - true, - MAX_MATCHES - visible_matches.len().min(MAX_MATCHES), - &cancellation_flag, - cx.background_executor().clone(), - )); - let sort_key_for_match = |mat: &StringMatch| { - let symbol = &symbols[mat.candidate_id]; - (Reverse(OrderedFloat(mat.score)), symbol.label.filter_text()) - }; - - visible_matches.sort_unstable_by_key(sort_key_for_match); - external_matches.sort_unstable_by_key(sort_key_for_match); - let mut matches = visible_matches; - matches.append(&mut external_matches); - - matches - .into_iter() - .map(|mut mat| { - let symbol = symbols[mat.candidate_id].clone(); - let filter_start = symbol.label.filter_range.start; - for position in &mut mat.positions { - *position += filter_start; - } - SymbolMatch { symbol } - }) - .collect() - }) -} - -fn compute_symbol_entries( - symbols: Vec, - context_store: &ContextStore, - cx: &App, -) -> Vec { - symbols - .into_iter() - .map(|SymbolMatch { symbol, .. }| SymbolEntry { - is_included: context_store.includes_symbol(&symbol, cx), - symbol, - }) - .collect::>() -} - -pub fn render_symbol_context_entry(id: ElementId, entry: &SymbolEntry) -> Stateful
{ - let path = match &entry.symbol.path { - SymbolLocation::InProject(project_path) => { - project_path.path.file_name().unwrap_or_default().into() - } - SymbolLocation::OutsideProject { - abs_path, - signature: _, - } => abs_path - .file_name() - .map(|f| f.to_string_lossy()) - .unwrap_or_default(), - }; - let symbol_location = format!("{} L{}", path, entry.symbol.range.start.0.row + 1); - - h_flex() - .id(id) - .gap_1p5() - .w_full() - .child( - Icon::new(IconName::Code) - .size(IconSize::Small) - .color(Color::Muted), - ) - .child( - h_flex() - .gap_1() - .child(Label::new(&entry.symbol.name)) - .child( - Label::new(symbol_location) - .size(LabelSize::Small) - .color(Color::Muted), - ), - ) - .when(entry.is_included, |el| { - el.child( - h_flex() - .w_full() - .justify_end() - .gap_0p5() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Added").size(LabelSize::Small)), - ) - }) -} diff --git a/crates/agent_ui/src/context_picker/thread_context_picker.rs b/crates/agent_ui/src/context_picker/thread_context_picker.rs deleted file mode 100644 index d6a3a270742fe28c483d2d7d39894eb9e3c021ea..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_picker/thread_context_picker.rs +++ /dev/null @@ -1,280 +0,0 @@ -use std::sync::Arc; -use std::sync::atomic::AtomicBool; - -use crate::{ - context_picker::ContextPicker, - context_store::{self, ContextStore}, -}; -use agent::{HistoryEntry, HistoryStore}; -use fuzzy::StringMatchCandidate; -use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity}; -use picker::{Picker, PickerDelegate}; -use ui::{ListItem, prelude::*}; -use workspace::Workspace; - -pub struct ThreadContextPicker { - picker: Entity>, -} - -impl ThreadContextPicker { - pub fn new( - thread_store: WeakEntity, - context_picker: WeakEntity, - context_store: WeakEntity, - workspace: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = ThreadContextPickerDelegate::new( - thread_store, - context_picker, - context_store, - workspace, - ); - let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); - - ThreadContextPicker { picker } - } -} - -impl Focusable for ThreadContextPicker { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) - } -} - -impl Render for ThreadContextPicker { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - self.picker.clone() - } -} - -pub struct ThreadContextPickerDelegate { - thread_store: WeakEntity, - context_picker: WeakEntity, - context_store: WeakEntity, - workspace: WeakEntity, - matches: Vec, - selected_index: usize, -} - -impl ThreadContextPickerDelegate { - pub fn new( - thread_store: WeakEntity, - context_picker: WeakEntity, - context_store: WeakEntity, - workspace: WeakEntity, - ) -> Self { - ThreadContextPickerDelegate { - thread_store, - context_picker, - context_store, - workspace, - matches: Vec::new(), - selected_index: 0, - } - } -} - -impl PickerDelegate for ThreadContextPickerDelegate { - type ListItem = ListItem; - - fn match_count(&self) -> usize { - self.matches.len() - } - - fn selected_index(&self) -> usize { - self.selected_index - } - - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search threads…".into() - } - - fn update_matches( - &mut self, - query: String, - 
window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let Some(thread_store) = self.thread_store.upgrade() else { - return Task::ready(()); - }; - - let search_task = search_threads(query, Arc::new(AtomicBool::default()), &thread_store, cx); - cx.spawn_in(window, async move |this, cx| { - let matches = search_task.await; - this.update(cx, |this, cx| { - this.delegate.matches = matches; - this.delegate.selected_index = 0; - cx.notify(); - }) - .ok(); - }) - } - - fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context>) { - let Some(project) = self - .workspace - .upgrade() - .map(|w| w.read(cx).project().clone()) - else { - return; - }; - let Some((entry, thread_store)) = self - .matches - .get(self.selected_index) - .zip(self.thread_store.upgrade()) - else { - return; - }; - - match entry { - HistoryEntry::AcpThread(thread) => { - let load_thread_task = - agent::load_agent_thread(thread.id.clone(), thread_store, project, cx); - - cx.spawn(async move |this, cx| { - let thread = load_thread_task.await?; - this.update(cx, |this, cx| { - this.delegate - .context_store - .update(cx, |context_store, cx| { - context_store.add_thread(thread, true, cx) - }) - .ok(); - }) - }) - .detach_and_log_err(cx); - } - HistoryEntry::TextThread(thread) => { - let task = thread_store.update(cx, |this, cx| { - this.load_text_thread(thread.path.clone(), cx) - }); - - cx.spawn(async move |this, cx| { - let thread = task.await?; - this.update(cx, |this, cx| { - this.delegate - .context_store - .update(cx, |context_store, cx| { - context_store.add_text_thread(thread, true, cx) - }) - .ok(); - }) - }) - .detach_and_log_err(cx); - } - } - } - - fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { - self.context_picker - .update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - } - - fn render_match( - &self, - ix: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let thread = &self.matches.get(ix)?; - - Some(ListItem::new(ix).inset(true).toggle_state(selected).child( - render_thread_context_entry(thread, self.context_store.clone(), cx), - )) - } -} - -pub fn render_thread_context_entry( - entry: &HistoryEntry, - context_store: WeakEntity, - cx: &mut App, -) -> Div { - let is_added = match entry { - HistoryEntry::AcpThread(thread) => context_store - .upgrade() - .is_some_and(|ctx_store| ctx_store.read(cx).includes_thread(&thread.id)), - HistoryEntry::TextThread(thread) => context_store - .upgrade() - .is_some_and(|ctx_store| ctx_store.read(cx).includes_text_thread(&thread.path)), - }; - - h_flex() - .gap_1p5() - .w_full() - .justify_between() - .child( - h_flex() - .gap_1p5() - .max_w_72() - .child( - Icon::new(IconName::Thread) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child(Label::new(entry.title().clone()).truncate()), - ) - .when(is_added, |el| { - el.child( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Added").size(LabelSize::Small)), - ) - }) -} - -pub(crate) fn search_threads( - query: String, - cancellation_flag: Arc, - thread_store: &Entity, - cx: &mut App, -) -> Task> { - let threads = thread_store.read(cx).entries().collect(); - if query.is_empty() { - return Task::ready(threads); - } - - let executor = cx.background_executor().clone(); - cx.background_spawn(async move { - let candidates = threads - .iter() - .enumerate() - .map(|(id, thread)| StringMatchCandidate::new(id, thread.title())) - .collect::>(); - let 
matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - 100, - &cancellation_flag, - executor, - ) - .await; - - matches - .into_iter() - .map(|mat| threads[mat.candidate_id].clone()) - .collect() - }) -} diff --git a/crates/agent_ui/src/context_store.rs b/crates/agent_ui/src/context_store.rs deleted file mode 100644 index 18aa59c8f716d59e4a0d717904b09472494c4dbc..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_store.rs +++ /dev/null @@ -1,614 +0,0 @@ -use crate::context::{ - AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle, - FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle, SelectionContextHandle, - SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, -}; -use agent_client_protocol as acp; -use anyhow::{Context as _, Result, anyhow}; -use assistant_text_thread::TextThread; -use collections::{HashSet, IndexSet}; -use futures::{self, FutureExt}; -use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity}; -use language::{Buffer, File as _}; -use language_model::LanguageModelImage; -use project::{ - Project, ProjectItem, ProjectPath, Symbol, image_store::is_image_file, - lsp_store::SymbolLocation, -}; -use prompt_store::UserPromptId; -use ref_cast::RefCast as _; -use std::{ - ops::Range, - path::{Path, PathBuf}, - sync::Arc, -}; -use text::{Anchor, OffsetRangeExt}; - -pub struct ContextStore { - project: WeakEntity, - next_context_id: ContextId, - context_set: IndexSet, - context_thread_ids: HashSet, - context_text_thread_paths: HashSet>, -} - -pub enum ContextStoreEvent { - ContextRemoved(AgentContextKey), -} - -impl EventEmitter for ContextStore {} - -impl ContextStore { - pub fn new(project: WeakEntity) -> Self { - Self { - project, - next_context_id: ContextId::zero(), - context_set: IndexSet::default(), - context_thread_ids: HashSet::default(), - context_text_thread_paths: HashSet::default(), - } - } - - pub fn context(&self) -> impl Iterator { - self.context_set.iter().map(|entry| entry.as_ref()) - } - - pub fn clear(&mut self, cx: &mut Context) { - self.context_set.clear(); - self.context_thread_ids.clear(); - cx.notify(); - } - - pub fn add_file_from_path( - &mut self, - project_path: ProjectPath, - remove_if_exists: bool, - cx: &mut Context, - ) -> Task>> { - let Some(project) = self.project.upgrade() else { - return Task::ready(Err(anyhow!("failed to read project"))); - }; - - if is_image_file(&project, &project_path, cx) { - self.add_image_from_path(project_path, remove_if_exists, cx) - } else { - cx.spawn(async move |this, cx| { - let open_buffer_task = project.update(cx, |project, cx| { - project.open_buffer(project_path.clone(), cx) - })?; - let buffer = open_buffer_task.await?; - this.update(cx, |this, cx| { - this.add_file_from_buffer(&project_path, buffer, remove_if_exists, cx) - }) - }) - } - } - - pub fn add_file_from_buffer( - &mut self, - project_path: &ProjectPath, - buffer: Entity, - remove_if_exists: bool, - cx: &mut Context, - ) -> Option { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::File(FileContextHandle { buffer, context_id }); - - if let Some(key) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(key.as_ref().clone()) - } - } else if self.path_included_in_directory(project_path, cx).is_some() { - None - } else { - self.insert_context(context.clone(), cx); - Some(context) - } - } - 
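The `add_file_from_buffer` method above, like the `add_*` methods that follow, treats a repeated add as a toggle: if the key is already in the context set and `remove_if_exists` is true, the existing entry is removed instead of being re-inserted and the method returns `None`. A minimal sketch of that contract, using a hypothetical `SimpleContextStore` with plain `String` keys standing in for `AgentContextKey` (not Zed's actual types):

```rust
use std::collections::HashSet;

#[derive(Default)]
struct SimpleContextStore {
    entries: HashSet<String>,
}

impl SimpleContextStore {
    /// Returns `Some(key)` if the entry is present after the call,
    /// or `None` if the call removed an existing entry (toggle behavior).
    fn add(&mut self, key: String, remove_if_exists: bool) -> Option<String> {
        if self.entries.contains(&key) {
            if remove_if_exists {
                self.entries.remove(&key);
                return None; // second add with the flag set toggles the entry off
            }
            return Some(key); // already present: reuse the existing entry
        }
        self.entries.insert(key.clone());
        Some(key)
    }
}

fn main() {
    let mut store = SimpleContextStore::default();
    assert!(store.add("src/main.rs".into(), true).is_some()); // added
    assert!(store.add("src/main.rs".into(), true).is_none()); // toggled off
}
```

This is the same shape of return value the pickers rely on when they flip an entry's "Added" state after a second confirmation.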
- pub fn add_directory( - &mut self, - project_path: &ProjectPath, - remove_if_exists: bool, - cx: &mut Context, - ) -> Result> { - let project = self.project.upgrade().context("failed to read project")?; - let entry_id = project - .read(cx) - .entry_for_path(project_path, cx) - .map(|entry| entry.id) - .context("no entry found for directory context")?; - - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::Directory(DirectoryContextHandle { - entry_id, - context_id, - }); - - let context = - if let Some(existing) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(existing.as_ref().clone()) - } - } else { - self.insert_context(context.clone(), cx); - Some(context) - }; - - anyhow::Ok(context) - } - - pub fn add_symbol( - &mut self, - buffer: Entity, - symbol: SharedString, - range: Range, - enclosing_range: Range, - remove_if_exists: bool, - cx: &mut Context, - ) -> (Option, bool) { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::Symbol(SymbolContextHandle { - buffer, - symbol, - range, - enclosing_range, - context_id, - }); - - if let Some(key) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - let handle = if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(key.as_ref().clone()) - }; - return (handle, false); - } - - let included = self.insert_context(context.clone(), cx); - (Some(context), included) - } - - pub fn add_thread( - &mut self, - thread: Entity, - remove_if_exists: bool, - cx: &mut Context, - ) -> Option { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::Thread(ThreadContextHandle { thread, context_id }); - - if let Some(existing) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(existing.as_ref().clone()) - } - } else { - self.insert_context(context.clone(), cx); - Some(context) - } - } - - pub fn add_text_thread( - &mut self, - text_thread: Entity, - remove_if_exists: bool, - cx: &mut Context, - ) -> Option { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::TextThread(TextThreadContextHandle { - text_thread, - context_id, - }); - - if let Some(existing) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(existing.as_ref().clone()) - } - } else { - self.insert_context(context.clone(), cx); - Some(context) - } - } - - pub fn add_rules( - &mut self, - prompt_id: UserPromptId, - remove_if_exists: bool, - cx: &mut Context, - ) -> Option { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::Rules(RulesContextHandle { - prompt_id, - context_id, - }); - - if let Some(existing) = self.context_set.get(AgentContextKey::ref_cast(&context)) { - if remove_if_exists { - self.remove_context(&context, cx); - None - } else { - Some(existing.as_ref().clone()) - } - } else { - self.insert_context(context.clone(), cx); - Some(context) - } - } - - pub fn add_fetched_url( - &mut self, - url: String, - text: impl Into, - cx: &mut Context, - ) -> AgentContextHandle { - let context = AgentContextHandle::FetchedUrl(FetchedUrlContext { - url: url.into(), - text: text.into(), - context_id: self.next_context_id.post_inc(), - }); - - self.insert_context(context.clone(), cx); - context - } 
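`add_directory`, `add_symbol`, `add_rules`, `add_fetched_url`, and the other methods above all allocate a fresh `ContextId` and then look the newly built handle up in `context_set`, so deduplication has to ignore that id and compare only the content (entry id, symbol range, prompt id, URL, and so on). A rough analogue of that key pattern, using hypothetical `UrlContext`/`UrlKey` types rather than Zed's `AgentContextKey`:

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct UrlContext {
    id: u64,     // unique per insertion; ignored when comparing entries
    url: String, // the content that defines the entry's identity
}

struct UrlKey(UrlContext);

impl PartialEq for UrlKey {
    fn eq(&self, other: &Self) -> bool {
        self.0.url == other.0.url
    }
}

impl Eq for UrlKey {}

impl Hash for UrlKey {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.url.hash(state);
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(UrlKey(UrlContext { id: 1, url: "https://example.com".into() }));
    // Same URL but a different id: rejected as a duplicate by the set.
    let inserted = set.insert(UrlKey(UrlContext { id: 2, url: "https://example.com".into() }));
    assert!(!inserted);
}
```

The `lookup_key` constructors used by `includes_url` and `includes_user_rules` later in this listing follow from the same idea: a key can be built from the content alone, without constructing a full handle.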
- - pub fn add_image_from_path( - &mut self, - project_path: ProjectPath, - remove_if_exists: bool, - cx: &mut Context, - ) -> Task>> { - let project = self.project.clone(); - cx.spawn(async move |this, cx| { - let open_image_task = project.update(cx, |project, cx| { - project.open_image(project_path.clone(), cx) - })?; - let image_item = open_image_task.await?; - - this.update(cx, |this, cx| { - let item = image_item.read(cx); - this.insert_image( - Some(item.project_path(cx)), - Some(item.file.full_path(cx).to_string_lossy().into_owned()), - item.image.clone(), - remove_if_exists, - cx, - ) - }) - }) - } - - pub fn add_image_instance(&mut self, image: Arc, cx: &mut Context) { - self.insert_image(None, None, image, false, cx); - } - - fn insert_image( - &mut self, - project_path: Option, - full_path: Option, - image: Arc, - remove_if_exists: bool, - cx: &mut Context, - ) -> Option { - let image_task = LanguageModelImage::from_image(image.clone(), cx).shared(); - let context = AgentContextHandle::Image(ImageContext { - project_path, - full_path, - original_image: image, - image_task, - context_id: self.next_context_id.post_inc(), - }); - if self.has_context(&context) && remove_if_exists { - self.remove_context(&context, cx); - return None; - } - - self.insert_context(context.clone(), cx); - Some(context) - } - - pub fn add_selection( - &mut self, - buffer: Entity, - range: Range, - cx: &mut Context, - ) { - let context_id = self.next_context_id.post_inc(); - let context = AgentContextHandle::Selection(SelectionContextHandle { - buffer, - range, - context_id, - }); - self.insert_context(context, cx); - } - - pub fn add_suggested_context( - &mut self, - suggested: &SuggestedContext, - cx: &mut Context, - ) { - match suggested { - SuggestedContext::File { - buffer, - icon_path: _, - name: _, - } => { - if let Some(buffer) = buffer.upgrade() { - let context_id = self.next_context_id.post_inc(); - self.insert_context( - AgentContextHandle::File(FileContextHandle { buffer, context_id }), - cx, - ); - }; - } - SuggestedContext::TextThread { - text_thread, - name: _, - } => { - if let Some(text_thread) = text_thread.upgrade() { - let context_id = self.next_context_id.post_inc(); - self.insert_context( - AgentContextHandle::TextThread(TextThreadContextHandle { - text_thread, - context_id, - }), - cx, - ); - } - } - } - } - - fn insert_context(&mut self, context: AgentContextHandle, cx: &mut Context) -> bool { - match &context { - // AgentContextHandle::Thread(thread_context) => { - // if let Some(thread_store) = self.thread_store.clone() { - // thread_context.thread.update(cx, |thread, cx| { - // thread.start_generating_detailed_summary_if_needed(thread_store, cx); - // }); - // self.context_thread_ids - // .insert(thread_context.thread.read(cx).id().clone()); - // } else { - // return false; - // } - // } - AgentContextHandle::TextThread(text_thread_context) => { - self.context_text_thread_paths - .extend(text_thread_context.text_thread.read(cx).path().cloned()); - } - _ => {} - } - let inserted = self.context_set.insert(AgentContextKey(context)); - if inserted { - cx.notify(); - } - inserted - } - - pub fn remove_context(&mut self, context: &AgentContextHandle, cx: &mut Context) { - if let Some((_, key)) = self - .context_set - .shift_remove_full(AgentContextKey::ref_cast(context)) - { - match context { - AgentContextHandle::Thread(thread_context) => { - self.context_thread_ids - .remove(thread_context.thread.read(cx).id()); - } - AgentContextHandle::TextThread(text_thread_context) => { - if let 
Some(path) = text_thread_context.text_thread.read(cx).path() { - self.context_text_thread_paths.remove(path); - } - } - _ => {} - } - cx.emit(ContextStoreEvent::ContextRemoved(key)); - cx.notify(); - } - } - - pub fn has_context(&mut self, context: &AgentContextHandle) -> bool { - self.context_set - .contains(AgentContextKey::ref_cast(context)) - } - - /// Returns whether this file path is already included directly in the context, or if it will be - /// included in the context via a directory. - pub fn file_path_included(&self, path: &ProjectPath, cx: &App) -> Option { - let project = self.project.upgrade()?.read(cx); - self.context().find_map(|context| match context { - AgentContextHandle::File(file_context) => { - FileInclusion::check_file(file_context, path, cx) - } - AgentContextHandle::Image(image_context) => { - FileInclusion::check_image(image_context, path) - } - AgentContextHandle::Directory(directory_context) => { - FileInclusion::check_directory(directory_context, path, project, cx) - } - _ => None, - }) - } - - pub fn path_included_in_directory( - &self, - path: &ProjectPath, - cx: &App, - ) -> Option { - let project = self.project.upgrade()?.read(cx); - self.context().find_map(|context| match context { - AgentContextHandle::Directory(directory_context) => { - FileInclusion::check_directory(directory_context, path, project, cx) - } - _ => None, - }) - } - - pub fn includes_symbol(&self, symbol: &Symbol, cx: &App) -> bool { - self.context().any(|context| match context { - AgentContextHandle::Symbol(context) => { - if context.symbol != symbol.name { - return false; - } - let buffer = context.buffer.read(cx); - let Some(context_path) = buffer.project_path(cx) else { - return false; - }; - if symbol.path != SymbolLocation::InProject(context_path) { - return false; - } - let context_range = context.range.to_point_utf16(&buffer.snapshot()); - context_range.start == symbol.range.start.0 - && context_range.end == symbol.range.end.0 - } - _ => false, - }) - } - - pub fn includes_thread(&self, thread_id: &acp::SessionId) -> bool { - self.context_thread_ids.contains(thread_id) - } - - pub fn includes_text_thread(&self, path: &Arc) -> bool { - self.context_text_thread_paths.contains(path) - } - - pub fn includes_user_rules(&self, prompt_id: UserPromptId) -> bool { - self.context_set - .contains(&RulesContextHandle::lookup_key(prompt_id)) - } - - pub fn includes_url(&self, url: impl Into) -> bool { - self.context_set - .contains(&FetchedUrlContext::lookup_key(url.into())) - } - - pub fn get_url_context(&self, url: SharedString) -> Option { - self.context_set - .get(&FetchedUrlContext::lookup_key(url)) - .map(|key| key.as_ref().clone()) - } - - pub fn file_paths(&self, cx: &App) -> HashSet { - self.context() - .filter_map(|context| match context { - AgentContextHandle::File(file) => { - let buffer = file.buffer.read(cx); - buffer.project_path(cx) - } - AgentContextHandle::Directory(_) - | AgentContextHandle::Symbol(_) - | AgentContextHandle::Thread(_) - | AgentContextHandle::Selection(_) - | AgentContextHandle::FetchedUrl(_) - | AgentContextHandle::TextThread(_) - | AgentContextHandle::Rules(_) - | AgentContextHandle::Image(_) => None, - }) - .collect() - } - - pub fn thread_ids(&self) -> &HashSet { - &self.context_thread_ids - } -} - -#[derive(Clone)] -pub enum SuggestedContext { - File { - name: SharedString, - icon_path: Option, - buffer: WeakEntity, - }, - TextThread { - name: SharedString, - text_thread: WeakEntity, - }, -} - -impl SuggestedContext { - pub fn name(&self) -> 
&SharedString { - match self { - Self::File { name, .. } => name, - Self::TextThread { name, .. } => name, - } - } - - pub fn icon_path(&self) -> Option { - match self { - Self::File { icon_path, .. } => icon_path.clone(), - Self::TextThread { .. } => None, - } - } - - pub fn kind(&self) -> ContextKind { - match self { - Self::File { .. } => ContextKind::File, - Self::TextThread { .. } => ContextKind::TextThread, - } - } -} - -pub enum FileInclusion { - Direct, - InDirectory { full_path: PathBuf }, -} - -impl FileInclusion { - fn check_file(file_context: &FileContextHandle, path: &ProjectPath, cx: &App) -> Option { - let file_path = file_context.buffer.read(cx).project_path(cx)?; - if path == &file_path { - Some(FileInclusion::Direct) - } else { - None - } - } - - fn check_image(image_context: &ImageContext, path: &ProjectPath) -> Option { - let image_path = image_context.project_path.as_ref()?; - if path == image_path { - Some(FileInclusion::Direct) - } else { - None - } - } - - fn check_directory( - directory_context: &DirectoryContextHandle, - path: &ProjectPath, - project: &Project, - cx: &App, - ) -> Option { - let worktree = project - .worktree_for_entry(directory_context.entry_id, cx)? - .read(cx); - let entry = worktree.entry_for_id(directory_context.entry_id)?; - let directory_path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - if path.starts_with(&directory_path) { - if path == &directory_path { - Some(FileInclusion::Direct) - } else { - Some(FileInclusion::InDirectory { - full_path: worktree.full_path(&entry.path), - }) - } - } else { - None - } - } -} diff --git a/crates/agent_ui/src/context_strip.rs b/crates/agent_ui/src/context_strip.rs deleted file mode 100644 index d2393ac4f612cebc6cf97d10a38894e7022e53b9..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/context_strip.rs +++ /dev/null @@ -1,619 +0,0 @@ -use crate::{ - AcceptSuggestedContext, AgentPanel, FocusDown, FocusLeft, FocusRight, FocusUp, - ModelUsageContext, RemoveAllContext, RemoveFocusedContext, ToggleContextPicker, - context_picker::ContextPicker, - ui::{AddedContext, ContextPill}, -}; -use crate::{ - context::AgentContextHandle, - context_store::{ContextStore, SuggestedContext}, -}; -use agent::HistoryStore; -use collections::HashSet; -use editor::Editor; -use gpui::{ - App, Bounds, ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - Subscription, Task, WeakEntity, -}; -use itertools::Itertools; -use project::ProjectItem; -use prompt_store::PromptStore; -use rope::Point; -use std::rc::Rc; -use text::ToPoint as _; -use ui::{PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*}; -use util::ResultExt as _; -use workspace::Workspace; -use zed_actions::assistant::OpenRulesLibrary; - -pub struct ContextStrip { - context_store: Entity, - context_picker: Entity, - context_picker_menu_handle: PopoverMenuHandle, - focus_handle: FocusHandle, - suggest_context_kind: SuggestContextKind, - workspace: WeakEntity, - prompt_store: Option>, - _subscriptions: Vec, - focused_index: Option, - children_bounds: Option>>, - model_usage_context: ModelUsageContext, -} - -impl ContextStrip { - pub fn new( - context_store: Entity, - workspace: WeakEntity, - thread_store: Option>, - prompt_store: Option>, - context_picker_menu_handle: PopoverMenuHandle, - suggest_context_kind: SuggestContextKind, - model_usage_context: ModelUsageContext, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let context_picker = cx.new(|cx| { - ContextPicker::new( - 
workspace.clone(), - thread_store.clone(), - prompt_store.clone(), - context_store.downgrade(), - window, - cx, - ) - }); - - let focus_handle = cx.focus_handle(); - - let subscriptions = vec![ - cx.observe(&context_store, |_, _, cx| cx.notify()), - cx.subscribe_in(&context_picker, window, Self::handle_context_picker_event), - cx.on_focus(&focus_handle, window, Self::handle_focus), - cx.on_blur(&focus_handle, window, Self::handle_blur), - ]; - - Self { - context_store: context_store.clone(), - context_picker, - context_picker_menu_handle, - focus_handle, - suggest_context_kind, - workspace, - prompt_store, - _subscriptions: subscriptions, - focused_index: None, - children_bounds: None, - model_usage_context, - } - } - - /// Whether or not the context strip has items to display - pub fn has_context_items(&self, cx: &App) -> bool { - self.context_store.read(cx).context().next().is_some() - || self.suggested_context(cx).is_some() - } - - fn added_contexts(&self, cx: &App) -> Vec { - if let Some(workspace) = self.workspace.upgrade() { - let project = workspace.read(cx).project().read(cx); - let prompt_store = self.prompt_store.as_ref().and_then(|p| p.upgrade()); - - let current_model = self.model_usage_context.language_model(cx); - - self.context_store - .read(cx) - .context() - .flat_map(|context| { - AddedContext::new_pending( - context.clone(), - prompt_store.as_ref(), - project, - current_model.as_ref(), - cx, - ) - }) - .collect::>() - } else { - Vec::new() - } - } - - fn suggested_context(&self, cx: &App) -> Option { - match self.suggest_context_kind { - SuggestContextKind::Thread => self.suggested_thread(cx), - } - } - - fn suggested_thread(&self, cx: &App) -> Option { - if !self.context_picker.read(cx).allow_threads() { - return None; - } - - let workspace = self.workspace.upgrade()?; - let panel = workspace.read(cx).panel::(cx)?.read(cx); - - if let Some(active_text_thread_editor) = panel.active_text_thread_editor() { - let text_thread = active_text_thread_editor.read(cx).text_thread(); - let weak_text_thread = text_thread.downgrade(); - let text_thread = text_thread.read(cx); - let path = text_thread.path()?; - - if self.context_store.read(cx).includes_text_thread(path) { - return None; - } - - Some(SuggestedContext::TextThread { - name: text_thread.summary().or_default(), - text_thread: weak_text_thread, - }) - } else { - None - } - } - - fn handle_context_picker_event( - &mut self, - _picker: &Entity, - _event: &DismissEvent, - _window: &mut Window, - cx: &mut Context, - ) { - cx.emit(ContextStripEvent::PickerDismissed); - } - - fn handle_focus(&mut self, _window: &mut Window, cx: &mut Context) { - self.focused_index = self.last_pill_index(); - cx.notify(); - } - - fn handle_blur(&mut self, _window: &mut Window, cx: &mut Context) { - self.focused_index = None; - cx.notify(); - } - - fn focus_left(&mut self, _: &FocusLeft, _window: &mut Window, cx: &mut Context) { - self.focused_index = match self.focused_index { - Some(index) if index > 0 => Some(index - 1), - _ => self.last_pill_index(), - }; - - cx.notify(); - } - - fn focus_right(&mut self, _: &FocusRight, _window: &mut Window, cx: &mut Context) { - let Some(last_index) = self.last_pill_index() else { - return; - }; - - self.focused_index = match self.focused_index { - Some(index) if index < last_index => Some(index + 1), - _ => Some(0), - }; - - cx.notify(); - } - - fn focus_up(&mut self, _: &FocusUp, _window: &mut Window, cx: &mut Context) { - let Some(focused_index) = self.focused_index else { - return; - }; - - if 
focused_index == 0 { - return cx.emit(ContextStripEvent::BlurredUp); - } - - let Some((focused, pills)) = self.focused_bounds(focused_index) else { - return; - }; - - let iter = pills[..focused_index].iter().enumerate().rev(); - self.focused_index = Self::find_best_horizontal_match(focused, iter).or(Some(0)); - cx.notify(); - } - - fn focus_down(&mut self, _: &FocusDown, _window: &mut Window, cx: &mut Context) { - let Some(focused_index) = self.focused_index else { - return; - }; - - let last_index = self.last_pill_index(); - - if self.focused_index == last_index { - return cx.emit(ContextStripEvent::BlurredDown); - } - - let Some((focused, pills)) = self.focused_bounds(focused_index) else { - return; - }; - - let iter = pills.iter().enumerate().skip(focused_index + 1); - self.focused_index = Self::find_best_horizontal_match(focused, iter).or(last_index); - cx.notify(); - } - - fn focused_bounds(&self, focused: usize) -> Option<(&Bounds, &[Bounds])> { - let pill_bounds = self.pill_bounds()?; - let focused = pill_bounds.get(focused)?; - - Some((focused, pill_bounds)) - } - - fn pill_bounds(&self) -> Option<&[Bounds]> { - let bounds = self.children_bounds.as_ref()?; - let eraser = if bounds.len() < 3 { 0 } else { 1 }; - let pills = &bounds[1..bounds.len() - eraser]; - - if pills.is_empty() { None } else { Some(pills) } - } - - fn last_pill_index(&self) -> Option { - Some(self.pill_bounds()?.len() - 1) - } - - fn find_best_horizontal_match<'a>( - focused: &'a Bounds, - iter: impl Iterator)>, - ) -> Option { - let mut best = None; - - let focused_left = focused.left(); - let focused_right = focused.right(); - - for (index, probe) in iter { - if probe.origin.y == focused.origin.y { - continue; - } - - let overlap = probe.right().min(focused_right) - probe.left().max(focused_left); - - best = match best { - Some((_, prev_overlap, y)) if probe.origin.y != y || prev_overlap > overlap => { - break; - } - Some(_) | None => Some((index, overlap, probe.origin.y)), - }; - } - - best.map(|(index, _, _)| index) - } - - fn open_context(&mut self, context: &AgentContextHandle, window: &mut Window, cx: &mut App) { - let Some(workspace) = self.workspace.upgrade() else { - return; - }; - - match context { - AgentContextHandle::File(file_context) => { - if let Some(project_path) = file_context.project_path(cx) { - workspace.update(cx, |workspace, cx| { - workspace - .open_path(project_path, None, true, window, cx) - .detach_and_log_err(cx); - }); - } - } - - AgentContextHandle::Directory(directory_context) => { - let entry_id = directory_context.entry_id; - workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |_project, cx| { - cx.emit(project::Event::RevealInProjectPanel(entry_id)); - }) - }) - } - - AgentContextHandle::Symbol(symbol_context) => { - let buffer = symbol_context.buffer.read(cx); - if let Some(project_path) = buffer.project_path(cx) { - let snapshot = buffer.snapshot(); - let target_position = symbol_context.range.start.to_point(&snapshot); - open_editor_at_position(project_path, target_position, &workspace, window, cx) - .detach(); - } - } - - AgentContextHandle::Selection(selection_context) => { - let buffer = selection_context.buffer.read(cx); - if let Some(project_path) = buffer.project_path(cx) { - let snapshot = buffer.snapshot(); - let target_position = selection_context.range.start.to_point(&snapshot); - - open_editor_at_position(project_path, target_position, &workspace, window, cx) - .detach(); - } - } - - AgentContextHandle::FetchedUrl(fetched_url_context) => { - 
cx.open_url(&fetched_url_context.url); - } - - AgentContextHandle::Thread(_thread_context) => {} - - AgentContextHandle::TextThread(text_thread_context) => { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = workspace.panel::(cx) { - let context = text_thread_context.text_thread.clone(); - window.defer(cx, move |window, cx| { - panel.update(cx, |panel, cx| { - panel.open_text_thread(context, window, cx) - }); - }); - } - }) - } - - AgentContextHandle::Rules(rules_context) => window.dispatch_action( - Box::new(OpenRulesLibrary { - prompt_to_select: Some(rules_context.prompt_id.0), - }), - cx, - ), - - AgentContextHandle::Image(_) => {} - } - } - - fn remove_focused_context( - &mut self, - _: &RemoveFocusedContext, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(index) = self.focused_index { - let added_contexts = self.added_contexts(cx); - let Some(context) = added_contexts.get(index) else { - return; - }; - - self.context_store.update(cx, |this, cx| { - this.remove_context(&context.handle, cx); - }); - - let is_now_empty = added_contexts.len() == 1; - if is_now_empty { - cx.emit(ContextStripEvent::BlurredEmpty); - } else { - self.focused_index = Some(index.saturating_sub(1)); - cx.notify(); - } - } - } - - fn is_suggested_focused(&self, added_contexts: &Vec) -> bool { - // We only suggest one item after the actual context - self.focused_index == Some(added_contexts.len()) - } - - fn accept_suggested_context( - &mut self, - _: &AcceptSuggestedContext, - _window: &mut Window, - cx: &mut Context, - ) { - if let Some(suggested) = self.suggested_context(cx) - && self.is_suggested_focused(&self.added_contexts(cx)) - { - self.add_suggested_context(&suggested, cx); - } - } - - fn add_suggested_context(&mut self, suggested: &SuggestedContext, cx: &mut Context) { - self.context_store.update(cx, |context_store, cx| { - context_store.add_suggested_context(suggested, cx) - }); - cx.notify(); - } -} - -impl Focusable for ContextStrip { - fn focus_handle(&self, _cx: &App) -> FocusHandle { - self.focus_handle.clone() - } -} - -impl Render for ContextStrip { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let context_picker = self.context_picker.clone(); - let focus_handle = self.focus_handle.clone(); - - let added_contexts = self.added_contexts(cx); - let dupe_names = added_contexts - .iter() - .map(|c| c.name.clone()) - .sorted() - .tuple_windows() - .filter(|(a, b)| a == b) - .map(|(a, _)| a) - .collect::>(); - let no_added_context = added_contexts.is_empty(); - - let suggested_context = self.suggested_context(cx).map(|suggested_context| { - ( - suggested_context, - self.is_suggested_focused(&added_contexts), - ) - }); - - h_flex() - .flex_wrap() - .gap_1() - .track_focus(&focus_handle) - .key_context("ContextStrip") - .on_action(cx.listener(Self::focus_up)) - .on_action(cx.listener(Self::focus_right)) - .on_action(cx.listener(Self::focus_down)) - .on_action(cx.listener(Self::focus_left)) - .on_action(cx.listener(Self::remove_focused_context)) - .on_action(cx.listener(Self::accept_suggested_context)) - .on_children_prepainted({ - let entity = cx.entity().downgrade(); - move |children_bounds, _window, cx| { - entity - .update(cx, |this, _| { - this.children_bounds = Some(children_bounds); - }) - .ok(); - } - }) - .child( - PopoverMenu::new("context-picker") - .menu({ - let context_picker = context_picker.clone(); - move |window, cx| { - context_picker.update(cx, |this, cx| { - this.init(window, cx); - }); - - 
Some(context_picker.clone()) - } - }) - .on_open({ - let context_picker = context_picker.downgrade(); - Rc::new(move |window, cx| { - context_picker - .update(cx, |context_picker, cx| { - context_picker.select_first(window, cx); - }) - .ok(); - }) - }) - .trigger_with_tooltip( - IconButton::new("add-context", IconName::Plus) - .icon_size(IconSize::Small) - .style(ui::ButtonStyle::Filled), - { - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "Add Context", - &ToggleContextPicker, - &focus_handle, - cx, - ) - } - }, - ) - .attach(gpui::Corner::TopLeft) - .anchor(gpui::Corner::BottomLeft) - .offset(gpui::Point { - x: px(0.0), - y: px(-2.0), - }) - .with_handle(self.context_picker_menu_handle.clone()), - ) - .children( - added_contexts - .into_iter() - .enumerate() - .map(|(i, added_context)| { - let name = added_context.name.clone(); - let context = added_context.handle.clone(); - ContextPill::added( - added_context, - dupe_names.contains(&name), - self.focused_index == Some(i), - Some({ - let context = context.clone(); - let context_store = self.context_store.clone(); - Rc::new(cx.listener(move |_this, _event, _window, cx| { - context_store.update(cx, |this, cx| { - this.remove_context(&context, cx); - }); - cx.notify(); - })) - }), - ) - .on_click({ - Rc::new(cx.listener(move |this, event: &ClickEvent, window, cx| { - if event.click_count() > 1 { - this.open_context(&context, window, cx); - } else { - this.focused_index = Some(i); - } - cx.notify(); - })) - }) - }), - ) - .when_some(suggested_context, |el, (suggested, focused)| { - el.child( - ContextPill::suggested( - suggested.name().clone(), - suggested.icon_path(), - suggested.kind(), - focused, - ) - .on_click(Rc::new(cx.listener( - move |this, _event, _window, cx| { - this.add_suggested_context(&suggested, cx); - }, - ))), - ) - }) - .when(!no_added_context, { - move |parent| { - parent.child( - IconButton::new("remove-all-context", IconName::Eraser) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "Remove All Context", - &RemoveAllContext, - &focus_handle, - cx, - ) - } - }) - .on_click(cx.listener({ - let focus_handle = focus_handle.clone(); - move |_this, _event, window, cx| { - focus_handle.dispatch_action(&RemoveAllContext, window, cx); - } - })), - ) - } - }) - .into_any() - } -} - -pub enum ContextStripEvent { - PickerDismissed, - BlurredEmpty, - BlurredDown, - BlurredUp, -} - -impl EventEmitter for ContextStrip {} - -pub enum SuggestContextKind { - Thread, -} - -fn open_editor_at_position( - project_path: project::ProjectPath, - target_position: Point, - workspace: &Entity, - window: &mut Window, - cx: &mut App, -) -> Task<()> { - let open_task = workspace.update(cx, |workspace, cx| { - workspace.open_path(project_path, None, true, window, cx) - }); - window.spawn(cx, async move |cx| { - if let Some(active_editor) = open_task - .await - .log_err() - .and_then(|item| item.downcast::()) - { - active_editor - .downgrade() - .update_in(cx, |editor, window, cx| { - editor.go_to_singleton_buffer_point(target_position, window, cx); - }) - .log_err(); - } - }) -} diff --git a/crates/agent_ui/src/favorite_models.rs b/crates/agent_ui/src/favorite_models.rs new file mode 100644 index 0000000000000000000000000000000000000000..d8d4db976fc9916973eedd9174925fba75a06b2b --- /dev/null +++ b/crates/agent_ui/src/favorite_models.rs @@ -0,0 +1,57 @@ +use std::sync::Arc; + +use agent_client_protocol::ModelId; +use 
fs::Fs; +use language_model::LanguageModel; +use settings::{LanguageModelSelection, update_settings_file}; +use ui::App; + +fn language_model_to_selection(model: &Arc<dyn LanguageModel>) -> LanguageModelSelection { + LanguageModelSelection { + provider: model.provider_id().to_string().into(), + model: model.id().0.to_string(), + } +} + +fn model_id_to_selection(model_id: &ModelId) -> LanguageModelSelection { + let id = model_id.0.as_ref(); + let (provider, model) = id.split_once('/').unwrap_or(("", id)); + LanguageModelSelection { + provider: provider.to_owned().into(), + model: model.to_owned(), + } +} + +pub fn toggle_in_settings( + model: Arc<dyn LanguageModel>, + should_be_favorite: bool, + fs: Arc<dyn Fs>, + cx: &App, +) { + let selection = language_model_to_selection(&model); + update_settings_file(fs, cx, move |settings, _| { + let agent = settings.agent.get_or_insert_default(); + if should_be_favorite { + agent.add_favorite_model(selection.clone()); + } else { + agent.remove_favorite_model(&selection); + } + }); +} + +pub fn toggle_model_id_in_settings( + model_id: ModelId, + should_be_favorite: bool, + fs: Arc<dyn Fs>, + cx: &App, +) { + let selection = model_id_to_selection(&model_id); + update_settings_file(fs, cx, move |settings, _| { + let agent = settings.agent.get_or_insert_default(); + if should_be_favorite { + agent.add_favorite_model(selection.clone()); + } else { + agent.remove_favorite_model(&selection); + } + }); +} diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index b05dba59e6b19fa5091903882748de853cd9cb93..671579f9ef018b495b7993279a852595c78d3e02 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1,21 +1,26 @@ +use language_model::AnthropicEventData; +use language_model::report_anthropic_event; use std::cmp; use std::mem; use std::ops::Range; use std::rc::Rc; use std::sync::Arc; +use uuid::Uuid; +use crate::context::load_context; +use crate::mention_set::MentionSet; use crate::{ AgentPanel, buffer_codegen::{BufferCodegen, CodegenAlternative, CodegenEvent}, - context_store::ContextStore, inline_prompt_editor::{CodegenStatus, InlineAssistId, PromptEditor, PromptEditorEvent}, terminal_inline_assistant::TerminalInlineAssistant, }; use agent::HistoryStore; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; -use client::telemetry::Telemetry; use collections::{HashMap, HashSet, VecDeque, hash_map}; +use editor::EditorSnapshot; +use editor::MultiBufferOffset; use editor::RowExt; use editor::SelectionEffects; use editor::scroll::ScrollOffset; @@ -29,20 +34,19 @@ use editor::{ }, }; use fs::Fs; +use futures::{FutureExt, channel::mpsc}; use gpui::{ App, Context, Entity, Focusable, Global, HighlightStyle, Subscription, Task, UpdateGlobal, WeakEntity, Window, point, }; use language::{Buffer, Point, Selection, TransactionId}; -use language_model::{ - ConfigurationError, ConfiguredModel, LanguageModelRegistry, report_assistant_event, -}; +use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry}; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; use project::{CodeAction, DisableAiSettings, LspAction, Project, ProjectTransaction}; use prompt_store::{PromptBuilder, PromptStore}; use settings::{Settings, SettingsStore}; -use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; + use terminal_view::{TerminalView, terminal_panel::TerminalPanel}; use text::{OffsetRangeExt, ToPoint as _}; use ui::prelude::*; @@ -50,13 +54,8 @@ use util::{RangeExt, ResultExt, maybe}; use 
workspace::{ItemHandle, Toast, Workspace, dock::Panel, notifications::NotificationId}; use zed_actions::agent::OpenSettings; -pub fn init( - fs: Arc, - prompt_builder: Arc, - telemetry: Arc, - cx: &mut App, -) { - cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry)); +pub fn init(fs: Arc, prompt_builder: Arc, cx: &mut App) { + cx.set_global(InlineAssistant::new(fs, prompt_builder)); cx.observe_global::(|cx| { if DisableAiSettings::get_global(cx).disable_ai { @@ -96,18 +95,14 @@ pub struct InlineAssistant { confirmed_assists: HashMap>, prompt_history: VecDeque, prompt_builder: Arc, - telemetry: Arc, fs: Arc, + _inline_assistant_completions: Option>>, } impl Global for InlineAssistant {} impl InlineAssistant { - pub fn new( - fs: Arc, - prompt_builder: Arc, - telemetry: Arc, - ) -> Self { + pub fn new(fs: Arc, prompt_builder: Arc) -> Self { Self { next_assist_id: InlineAssistId::default(), next_assist_group_id: InlineAssistGroupId::default(), @@ -117,8 +112,8 @@ impl InlineAssistant { confirmed_assists: HashMap::default(), prompt_history: VecDeque::default(), prompt_builder, - telemetry, fs, + _inline_assistant_completions: None, } } @@ -212,16 +207,10 @@ impl InlineAssistant { if let Some(editor) = item.act_as::(cx) { editor.update(cx, |editor, cx| { if is_ai_enabled { - let panel = workspace.read(cx).panel::(cx); - let thread_store = panel - .as_ref() - .map(|agent_panel| agent_panel.read(cx).thread_store().downgrade()); - editor.add_code_action_provider( Rc::new(AssistantCodeActionProvider { editor: cx.entity().downgrade(), workspace: workspace.downgrade(), - thread_store, }), window, cx, @@ -233,9 +222,6 @@ impl InlineAssistant { editor.cancel(&Default::default(), window, cx); } } - - // Remove the Assistant1 code action provider, as it still might be registered. 
- editor.remove_code_action_provider("assistant".into(), window, cx); } else { editor.remove_code_action_provider( ASSISTANT_CODE_ACTION_PROVIDER_ID.into(), @@ -277,8 +263,7 @@ impl InlineAssistant { let agent_panel = agent_panel.read(cx); let prompt_store = agent_panel.prompt_store().as_ref().cloned(); - let thread_store = Some(agent_panel.thread_store().downgrade()); - let context_store = agent_panel.inline_assist_context_store().clone(); + let thread_store = agent_panel.thread_store().clone(); let handle_assist = |window: &mut Window, cx: &mut Context| match inline_assist_target { @@ -287,14 +272,13 @@ impl InlineAssistant { assistant.assist( &active_editor, cx.entity().downgrade(), - context_store, workspace.project().downgrade(), - prompt_store, thread_store, + prompt_store, action.prompt.clone(), window, cx, - ) + ); }) } InlineAssistTarget::Terminal(active_terminal) => { @@ -303,13 +287,13 @@ impl InlineAssistant { &active_terminal, cx.entity().downgrade(), workspace.project().downgrade(), - prompt_store, thread_store, + prompt_store, action.prompt.clone(), window, cx, - ) - }) + ); + }); } }; @@ -350,25 +334,20 @@ impl InlineAssistant { } } - pub fn assist( + fn codegen_ranges( &mut self, editor: &Entity, - workspace: WeakEntity, - context_store: Entity, - project: WeakEntity, - prompt_store: Option>, - thread_store: Option>, - initial_prompt: Option, + snapshot: &EditorSnapshot, window: &mut Window, cx: &mut App, - ) { - let (snapshot, initial_selections, newest_selection) = editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(window, cx); - let selections = editor.selections.all::(&snapshot.display_snapshot); - let newest_selection = editor - .selections - .newest::(&snapshot.display_snapshot); - (snapshot, selections, newest_selection) + ) -> Option<(Vec>, Selection)> { + let (initial_selections, newest_selection) = editor.update(cx, |editor, _| { + ( + editor.selections.all::(&snapshot.display_snapshot), + editor + .selections + .newest::(&snapshot.display_snapshot), + ) }); // Check if there is already an inline assistant that contains the @@ -381,7 +360,7 @@ impl InlineAssistant { && newest_selection.end.row <= range.end.row { self.focus_assist(*assist_id, window, cx); - return; + return None; } } } @@ -389,17 +368,9 @@ impl InlineAssistant { let mut selections = Vec::>::new(); let mut newest_selection = None; for mut selection in initial_selections { - if selection.end > selection.start { - selection.start.column = 0; - // If the selection ends at the start of the line, we don't want to include it. - if selection.end.column == 0 { - selection.end.row -= 1; - } - selection.end.column = snapshot - .buffer_snapshot() - .line_len(MultiBufferRow(selection.end.row)); - } else if let Some(fold) = - snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row)) + if selection.end == selection.start + && let Some(fold) = + snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row)) { selection.start = fold.range().start; selection.end = fold.range().end; @@ -426,6 +397,15 @@ impl InlineAssistant { } } } + } else { + selection.start.column = 0; + // If the selection ends at the start of the line, we don't want to include it. 
+ if selection.end.column == 0 && selection.start.row != selection.end.row { + selection.end.row -= 1; + } + selection.end.column = snapshot + .buffer_snapshot() + .line_len(MultiBufferRow(selection.end.row)); } if let Some(prev_selection) = selections.last_mut() @@ -452,28 +432,55 @@ impl InlineAssistant { { let anchor_range = Anchor::range_in_buffer( excerpt_id, - buffer.remote_id(), buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end), ); codegen_ranges.push(anchor_range); if let Some(model) = LanguageModelRegistry::read_global(cx).inline_assistant_model() { - self.telemetry.report_assistant_event(AssistantEventData { - conversation_id: None, - kind: AssistantKind::Inline, - phase: AssistantPhase::Invoked, - message_id: None, - model: model.model.telemetry_id(), - model_provider: model.provider.id().to_string(), - response_latency: None, - error_message: None, - language_name: buffer.language().map(|language| language.name().to_proto()), - }); + telemetry::event!( + "Assistant Invoked", + kind = "inline", + phase = "invoked", + model = model.model.telemetry_id(), + model_provider = model.provider.id().to_string(), + language_name = buffer.language().map(|language| language.name().to_proto()) + ); + + report_anthropic_event( + &model.model, + AnthropicEventData { + completion_type: language_model::AnthropicCompletionType::Editor, + event: language_model::AnthropicEventType::Invoked, + language_name: buffer.language().map(|language| language.name().to_proto()), + message_id: None, + }, + cx, + ); } } + Some((codegen_ranges, newest_selection)) + } + + fn batch_assist( + &mut self, + editor: &Entity, + workspace: WeakEntity, + project: WeakEntity, + thread_store: Entity, + prompt_store: Option>, + initial_prompt: Option, + window: &mut Window, + codegen_ranges: &[Range], + newest_selection: Option>, + initial_transaction_id: Option, + cx: &mut App, + ) -> Option { + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); + let assist_group_id = self.next_assist_group_id.post_inc(); + let session_id = Uuid::new_v4(); let prompt_buffer = cx.new(|cx| { MultiBuffer::singleton( cx.new(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx)), @@ -483,17 +490,15 @@ impl InlineAssistant { let mut assists = Vec::new(); let mut assist_to_focus = None; + for range in codegen_ranges { let assist_id = self.next_assist_id.post_inc(); let codegen = cx.new(|cx| { BufferCodegen::new( editor.read(cx).buffer().clone(), range.clone(), - None, - context_store.clone(), - project.clone(), - prompt_store.clone(), - self.telemetry.clone(), + initial_transaction_id, + session_id, self.prompt_builder.clone(), cx, ) @@ -507,35 +512,39 @@ impl InlineAssistant { self.prompt_history.clone(), prompt_buffer.clone(), codegen.clone(), + session_id, self.fs.clone(), - context_store.clone(), - workspace.clone(), thread_store.clone(), - prompt_store.as_ref().map(|s| s.downgrade()), + prompt_store.clone(), + project.clone(), + workspace.clone(), window, cx, ) }); - if assist_to_focus.is_none() { + if let Some(newest_selection) = newest_selection.as_ref() + && assist_to_focus.is_none() + { let focus_assist = if newest_selection.reversed { - range.start.to_point(snapshot) == newest_selection.start + range.start.to_point(&snapshot) == newest_selection.start } else { - range.end.to_point(snapshot) == newest_selection.end + range.end.to_point(&snapshot) == newest_selection.end }; if focus_assist { assist_to_focus = Some(assist_id); } } - let [prompt_block_id, end_block_id] = - 
self.insert_assist_blocks(editor, &range, &prompt_editor, cx); + let [prompt_block_id, tool_description_block_id, end_block_id] = + self.insert_assist_blocks(&editor, &range, &prompt_editor, cx); assists.push(( assist_id, - range, + range.clone(), prompt_editor, prompt_block_id, + tool_description_block_id, end_block_id, )); } @@ -544,8 +553,25 @@ impl InlineAssistant { .assists_by_editor .entry(editor.downgrade()) .or_insert_with(|| EditorInlineAssists::new(editor, window, cx)); + + let assist_to_focus = if let Some(focus_id) = assist_to_focus { + Some(focus_id) + } else if assists.len() >= 1 { + Some(assists[0].0) + } else { + None + }; + let mut assist_group = InlineAssistGroup::new(); - for (assist_id, range, prompt_editor, prompt_block_id, end_block_id) in assists { + for ( + assist_id, + range, + prompt_editor, + prompt_block_id, + tool_description_block_id, + end_block_id, + ) in assists + { let codegen = prompt_editor.read(cx).codegen().clone(); self.assists.insert( @@ -556,6 +582,7 @@ impl InlineAssistant { editor, &prompt_editor, prompt_block_id, + tool_description_block_id, end_block_id, range, codegen, @@ -567,11 +594,50 @@ impl InlineAssistant { assist_group.assist_ids.push(assist_id); editor_assists.assist_ids.push(assist_id); } + self.assist_groups.insert(assist_group_id, assist_group); + assist_to_focus + } + + pub fn assist( + &mut self, + editor: &Entity, + workspace: WeakEntity, + project: WeakEntity, + thread_store: Entity, + prompt_store: Option>, + initial_prompt: Option, + window: &mut Window, + cx: &mut App, + ) -> Option { + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); + + let Some((codegen_ranges, newest_selection)) = + self.codegen_ranges(editor, &snapshot, window, cx) + else { + return None; + }; + + let assist_to_focus = self.batch_assist( + editor, + workspace, + project, + thread_store, + prompt_store, + initial_prompt, + window, + &codegen_ranges, + Some(newest_selection), + None, + cx, + ); + if let Some(assist_id) = assist_to_focus { self.focus_assist(assist_id, window, cx); } + + assist_to_focus } pub fn suggest_assist( @@ -582,17 +648,11 @@ impl InlineAssistant { initial_transaction_id: Option, focus: bool, workspace: Entity, + thread_store: Entity, prompt_store: Option>, - thread_store: Option>, window: &mut Window, cx: &mut App, ) -> InlineAssistId { - let assist_group_id = self.next_assist_group_id.post_inc(); - let prompt_buffer = cx.new(|cx| Buffer::local(&initial_prompt, cx)); - let prompt_buffer = cx.new(|cx| MultiBuffer::singleton(prompt_buffer, cx)); - - let assist_id = self.next_assist_id.post_inc(); - let buffer = editor.read(cx).buffer().clone(); { let snapshot = buffer.read(cx).read(cx); @@ -601,68 +661,22 @@ impl InlineAssistant { } let project = workspace.read(cx).project().downgrade(); - let context_store = cx.new(|_cx| ContextStore::new(project.clone())); - - let codegen = cx.new(|cx| { - BufferCodegen::new( - editor.read(cx).buffer().clone(), - range.clone(), - initial_transaction_id, - context_store.clone(), - project, - prompt_store.clone(), - self.telemetry.clone(), - self.prompt_builder.clone(), - cx, - ) - }); - let editor_margins = Arc::new(Mutex::new(EditorMargins::default())); - let prompt_editor = cx.new(|cx| { - PromptEditor::new_buffer( - assist_id, - editor_margins, - self.prompt_history.clone(), - prompt_buffer.clone(), - codegen.clone(), - self.fs.clone(), - context_store, + let assist_id = self + .batch_assist( + editor, workspace.downgrade(), + project, thread_store, - prompt_store.map(|s| 
s.downgrade()), + prompt_store, + Some(initial_prompt), window, + &[range], + None, + initial_transaction_id, cx, ) - }); - - let [prompt_block_id, end_block_id] = - self.insert_assist_blocks(editor, &range, &prompt_editor, cx); - - let editor_assists = self - .assists_by_editor - .entry(editor.downgrade()) - .or_insert_with(|| EditorInlineAssists::new(editor, window, cx)); - - let mut assist_group = InlineAssistGroup::new(); - self.assists.insert( - assist_id, - InlineAssist::new( - assist_id, - assist_group_id, - editor, - &prompt_editor, - prompt_block_id, - end_block_id, - range, - codegen.clone(), - workspace.downgrade(), - window, - cx, - ), - ); - assist_group.assist_ids.push(assist_id); - editor_assists.assist_ids.push(assist_id); - self.assist_groups.insert(assist_group_id, assist_group); + .expect("batch_assist returns an id if there's only one range"); if focus { self.focus_assist(assist_id, window, cx); @@ -677,7 +691,7 @@ impl InlineAssistant { range: &Range, prompt_editor: &Entity>, cx: &mut App, - ) -> [CustomBlockId; 2] { + ) -> [CustomBlockId; 3] { let prompt_editor_height = prompt_editor.update(cx, |prompt_editor, cx| { prompt_editor .editor @@ -691,6 +705,14 @@ impl InlineAssistant { render: build_assist_editor_renderer(prompt_editor), priority: 0, }, + // Placeholder for tool description - will be updated dynamically + BlockProperties { + style: BlockStyle::Flex, + placement: BlockPlacement::Below(range.end), + height: Some(0), + render: Arc::new(|_cx| div().into_any_element()), + priority: 0, + }, BlockProperties { style: BlockStyle::Sticky, placement: BlockPlacement::Below(range.end), @@ -709,7 +731,7 @@ impl InlineAssistant { editor.update(cx, |editor, cx| { let block_ids = editor.insert_blocks(assist_blocks, None, cx); - [block_ids[0], block_ids[1]] + [block_ids[0], block_ids[1], block_ids[2]] }) } @@ -803,7 +825,7 @@ impl InlineAssistant { ( editor .selections - .newest::(&editor.display_snapshot(cx)), + .newest::(&editor.display_snapshot(cx)), editor.buffer().read(cx).snapshot(cx), ) }); @@ -836,7 +858,7 @@ impl InlineAssistant { ( editor .selections - .newest::(&editor.display_snapshot(cx)), + .newest::(&editor.display_snapshot(cx)), editor.buffer().read(cx).snapshot(cx), ) }); @@ -853,12 +875,14 @@ impl InlineAssistant { } else { let distance_from_selection = assist_range .start - .abs_diff(selection.start) - .min(assist_range.start.abs_diff(selection.end)) + .0 + .abs_diff(selection.start.0) + .min(assist_range.start.0.abs_diff(selection.end.0)) + assist_range .end - .abs_diff(selection.start) - .min(assist_range.end.abs_diff(selection.end)); + .0 + .abs_diff(selection.start.0) + .min(assist_range.end.0.abs_diff(selection.end.0)); match closest_assist_fallback { Some((_, old_distance)) => { if distance_from_selection < old_distance { @@ -935,7 +959,7 @@ impl InlineAssistant { EditorEvent::Edited { transaction_id } => { let buffer = editor.read(cx).buffer().read(cx); let edited_ranges = - buffer.edited_ranges_for_transaction::(*transaction_id, cx); + buffer.edited_ranges_for_transaction::(*transaction_id, cx); let snapshot = buffer.snapshot(cx); for assist_id in editor_assists.assist_ids.clone() { @@ -1036,8 +1060,6 @@ impl InlineAssistant { } let active_alternative = assist.codegen.read(cx).active_alternative().clone(); - let message_id = active_alternative.read(cx).message_id.clone(); - if let Some(model) = LanguageModelRegistry::read_global(cx).inline_assistant_model() { let language_name = assist.editor.upgrade().and_then(|editor| { let multibuffer = 
editor.read(cx).buffer().read(cx); @@ -1046,28 +1068,49 @@ impl InlineAssistant { ranges .first() .and_then(|(buffer, _, _)| buffer.language()) - .map(|language| language.name()) + .map(|language| language.name().0.to_string()) }); - report_assistant_event( - AssistantEventData { - conversation_id: None, - kind: AssistantKind::Inline, + + let codegen = assist.codegen.read(cx); + let session_id = codegen.session_id(); + let message_id = active_alternative.read(cx).message_id.clone(); + let model_telemetry_id = model.model.telemetry_id(); + let model_provider_id = model.model.provider_id().to_string(); + + let (phase, event_type, anthropic_event_type) = if undo { + ( + "rejected", + "Assistant Response Rejected", + language_model::AnthropicEventType::Reject, + ) + } else { + ( + "accepted", + "Assistant Response Accepted", + language_model::AnthropicEventType::Accept, + ) + }; + + telemetry::event!( + event_type, + phase, + session_id = session_id.to_string(), + kind = "inline", + model = model_telemetry_id, + model_provider = model_provider_id, + language_name = language_name, + message_id = message_id.as_deref(), + ); + + report_anthropic_event( + &model.model, + language_model::AnthropicEventData { + completion_type: language_model::AnthropicCompletionType::Editor, + event: anthropic_event_type, + language_name, message_id, - phase: if undo { - AssistantPhase::Rejected - } else { - AssistantPhase::Accepted - }, - model: model.model.telemetry_id(), - model_provider: model.model.provider_id().to_string(), - response_latency: None, - error_message: None, - language_name: language_name.map(|name| name.to_proto()), }, - Some(self.telemetry.clone()), - cx.http_client(), - model.model.api_key(cx), - cx.background_executor(), + cx, ); } @@ -1099,6 +1142,9 @@ impl InlineAssistant { let mut to_remove = decorations.removed_line_block_ids; to_remove.insert(decorations.prompt_block_id); to_remove.insert(decorations.end_block_id); + if let Some(tool_description_block_id) = decorations.model_explanation { + to_remove.insert(tool_description_block_id); + } editor.remove_blocks(to_remove, None, cx); }); @@ -1151,7 +1197,7 @@ impl InlineAssistant { assist .editor - .update(cx, |editor, cx| window.focus(&editor.focus_handle(cx))) + .update(cx, |editor, cx| window.focus(&editor.focus_handle(cx), cx)) .ok(); } @@ -1163,7 +1209,7 @@ impl InlineAssistant { if let Some(decorations) = assist.decorations.as_ref() { decorations.prompt_editor.update(cx, |prompt_editor, cx| { prompt_editor.editor.update(cx, |editor, cx| { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.select_all(&SelectAll, window, cx); }) }); @@ -1274,7 +1320,8 @@ impl InlineAssistant { return; } - let Some(user_prompt) = assist.user_prompt(cx) else { + let Some((user_prompt, mention_set)) = assist.user_prompt(cx).zip(assist.mention_set(cx)) + else { return; }; @@ -1290,9 +1337,12 @@ impl InlineAssistant { return; }; + let context_task = load_context(&mention_set, cx).shared(); assist .codegen - .update(cx, |codegen, cx| codegen.start(model, user_prompt, cx)) + .update(cx, |codegen, cx| { + codegen.start(model, user_prompt, context_task, cx) + }) .log_err(); } @@ -1438,6 +1488,7 @@ impl InlineAssistant { multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( old_buffer.clone(), + // todo(lw): buffer_start and buffer_end might come from different snapshots! 
Some(ExcerptRange::new(buffer_start..buffer_end)), cx, ); @@ -1449,6 +1500,7 @@ impl InlineAssistant { editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx); editor.set_show_wrap_guides(false, cx); editor.set_show_gutter(false, cx); + editor.set_offset_content(false, cx); editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_edit_predictions(Some(false), window, cx); @@ -1533,6 +1585,27 @@ impl InlineAssistant { .map(InlineAssistTarget::Terminal) } } + + #[cfg(any(test, feature = "test-support"))] + pub fn set_completion_receiver( + &mut self, + sender: mpsc::UnboundedSender>, + ) { + self._inline_assistant_completions = Some(sender); + } + + #[cfg(any(test, feature = "test-support"))] + pub fn get_codegen( + &mut self, + assist_id: InlineAssistId, + cx: &mut App, + ) -> Option> { + self.assists.get(&assist_id).map(|inline_assist| { + inline_assist + .codegen + .update(cx, |codegen, _cx| codegen.active_alternative().clone()) + }) + } } struct EditorInlineAssists { @@ -1666,6 +1739,7 @@ impl InlineAssist { editor: &Entity, prompt_editor: &Entity>, prompt_block_id: CustomBlockId, + tool_description_block_id: CustomBlockId, end_block_id: CustomBlockId, range: Range, codegen: Entity, @@ -1680,7 +1754,8 @@ impl InlineAssist { decorations: Some(InlineAssistDecorations { prompt_block_id, prompt_editor: prompt_editor.clone(), - removed_line_block_ids: HashSet::default(), + removed_line_block_ids: Default::default(), + model_explanation: Some(tool_description_block_id), end_block_id, }), range, @@ -1732,6 +1807,16 @@ impl InlineAssist { && assist.decorations.is_none() && let Some(workspace) = assist.workspace.upgrade() { + #[cfg(any(test, feature = "test-support"))] + if let Some(sender) = &mut this._inline_assistant_completions { + sender + .unbounded_send(Err(anyhow::anyhow!( + "Inline assistant error: {}", + error + ))) + .ok(); + } + let error = format!("Inline assistant error: {}", error); workspace.update(cx, |workspace, cx| { struct InlineAssistantError; @@ -1742,6 +1827,11 @@ impl InlineAssist { workspace.show_toast(Toast::new(id, error), cx); }) + } else { + #[cfg(any(test, feature = "test-support"))] + if let Some(sender) = &mut this._inline_assistant_completions { + sender.unbounded_send(Ok(assist_id)).ok(); + } } if assist.decorations.is_none() { @@ -1758,22 +1848,27 @@ impl InlineAssist { let decorations = self.decorations.as_ref()?; Some(decorations.prompt_editor.read(cx).prompt(cx)) } + + fn mention_set(&self, cx: &App) -> Option> { + let decorations = self.decorations.as_ref()?; + Some(decorations.prompt_editor.read(cx).mention_set().clone()) + } } struct InlineAssistDecorations { prompt_block_id: CustomBlockId, prompt_editor: Entity>, removed_line_block_ids: HashSet, + model_explanation: Option, end_block_id: CustomBlockId, } struct AssistantCodeActionProvider { editor: WeakEntity, workspace: WeakEntity, - thread_store: Option>, } -const ASSISTANT_CODE_ACTION_PROVIDER_ID: &str = "assistant2"; +const ASSISTANT_CODE_ACTION_PROVIDER_ID: &str = "assistant"; impl CodeActionProvider for AssistantCodeActionProvider { fn id(&self) -> Arc { @@ -1841,10 +1936,20 @@ impl CodeActionProvider for AssistantCodeActionProvider { ) -> Task> { let editor = self.editor.clone(); let workspace = self.workspace.clone(); - let thread_store = self.thread_store.clone(); let prompt_store = PromptStore::global(cx); window.spawn(cx, async move |cx| { let workspace = workspace.upgrade().context("workspace was released")?; + let thread_store = 
cx.update(|_window, cx| { + anyhow::Ok( + workspace + .read(cx) + .panel::(cx) + .context("missing agent panel")? + .read(cx) + .thread_store() + .clone(), + ) + })??; let editor = editor.upgrade().context("editor was released")?; let range = editor .update(cx, |editor, cx| { @@ -1887,8 +1992,8 @@ impl CodeActionProvider for AssistantCodeActionProvider { None, true, workspace, - prompt_store, thread_store, + prompt_store, window, cx, ); @@ -1921,3 +2026,387 @@ fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) { } } } + +#[cfg(any(test, feature = "unit-eval"))] +#[cfg_attr(not(test), allow(dead_code))] +pub mod test { + + use std::sync::Arc; + + use agent::HistoryStore; + use assistant_text_thread::TextThreadStore; + use client::{Client, UserStore}; + use editor::{Editor, MultiBuffer, MultiBufferOffset}; + use fs::FakeFs; + use futures::channel::mpsc; + use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; + use language::Buffer; + use project::Project; + use prompt_store::PromptBuilder; + use smol::stream::StreamExt as _; + use util::test::marked_text_ranges; + use workspace::Workspace; + + use crate::InlineAssistant; + + #[derive(Debug)] + pub enum InlineAssistantOutput { + Success { + completion: Option, + description: Option, + full_buffer_text: String, + }, + Failure { + failure: String, + }, + // These fields are used for logging + #[allow(unused)] + Malformed { + completion: Option, + description: Option, + failure: Option, + }, + } + + pub fn run_inline_assistant_test( + base_buffer: String, + prompt: String, + setup: SetupF, + test: TestF, + cx: &mut TestAppContext, + ) -> InlineAssistantOutput + where + SetupF: FnOnce(&mut gpui::VisualTestContext), + TestF: FnOnce(&mut gpui::VisualTestContext), + { + let fs = FakeFs::new(cx.executor()); + let app_state = cx.update(|cx| workspace::AppState::test(cx)); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let http = Arc::new(reqwest_client::ReqwestClient::user_agent("agent tests").unwrap()); + let client = cx.update(|cx| { + cx.set_http_client(http); + Client::production(cx) + }); + let mut inline_assistant = InlineAssistant::new(fs.clone(), prompt_builder); + + let (tx, mut completion_rx) = mpsc::unbounded(); + inline_assistant.set_completion_receiver(tx); + + // Initialize settings and client + cx.update(|cx| { + gpui_tokio::init(cx); + settings::init(cx); + client::init(&client, cx); + workspace::init(app_state.clone(), cx); + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(client.clone(), cx); + language_models::init(user_store, client.clone(), cx); + + cx.set_global(inline_assistant); + }); + + let project = cx + .executor() + .block_test(async { Project::test(fs.clone(), [], cx).await }); + + // Create workspace with window + let (workspace, cx) = cx.add_window_view(|window, cx| { + window.activate_window(); + Workspace::new(None, project.clone(), app_state.clone(), window, cx) + }); + + setup(cx); + + let (_editor, buffer) = cx.update(|window, cx| { + let buffer = cx.new(|cx| Buffer::local("", cx)); + let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + let editor = cx.new(|cx| Editor::for_multibuffer(multibuffer, None, window, cx)); + editor.update(cx, |editor, cx| { + let (unmarked_text, selection_ranges) = marked_text_ranges(&base_buffer, true); + editor.set_text(unmarked_text, window, cx); + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges( + selection_ranges.into_iter().map(|range| { + 
MultiBufferOffset(range.start)..MultiBufferOffset(range.end) + }), + ) + }) + }); + + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + + // Add editor to workspace + workspace.update(cx, |workspace, cx| { + workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); + }); + + // Call assist method + InlineAssistant::update_global(cx, |inline_assistant, cx| { + let assist_id = inline_assistant + .assist( + &editor, + workspace.downgrade(), + project.downgrade(), + history_store, // thread_store + None, // prompt_store + Some(prompt), + window, + cx, + ) + .unwrap(); + + inline_assistant.start_assist(assist_id, window, cx); + }); + + (editor, buffer) + }); + + cx.run_until_parked(); + + test(cx); + + let assist_id = cx + .executor() + .block_test(async { completion_rx.next().await }) + .unwrap() + .unwrap(); + + let (completion, description, failure) = cx.update(|_, cx| { + InlineAssistant::update_global(cx, |inline_assistant, cx| { + let codegen = inline_assistant.get_codegen(assist_id, cx).unwrap(); + + let completion = codegen.read(cx).current_completion(); + let description = codegen.read(cx).current_description(); + let failure = codegen.read(cx).current_failure(); + + (completion, description, failure) + }) + }); + + if failure.is_some() && (completion.is_some() || description.is_some()) { + InlineAssistantOutput::Malformed { + completion, + description, + failure, + } + } else if let Some(failure) = failure { + InlineAssistantOutput::Failure { failure } + } else { + InlineAssistantOutput::Success { + completion, + description, + full_buffer_text: buffer.read_with(cx, |buffer, _| buffer.text()), + } + } + } +} + +#[cfg(any(test, feature = "unit-eval"))] +#[cfg_attr(not(test), allow(dead_code))] +pub mod evals { + use std::str::FromStr; + + use eval_utils::{EvalOutput, NoProcessor}; + use gpui::TestAppContext; + use language_model::{LanguageModelRegistry, SelectedModel}; + use rand::{SeedableRng as _, rngs::StdRng}; + + use crate::inline_assistant::test::{InlineAssistantOutput, run_inline_assistant_test}; + + #[test] + #[cfg_attr(not(feature = "unit-eval"), ignore)] + fn eval_single_cursor_edit() { + run_eval( + 20, + 1.0, + "Rename this variable to buffer_text".to_string(), + indoc::indoc! {" + struct EvalExampleStruct { + text: Strˇing, + prompt: String, + } + "} + .to_string(), + exact_buffer_match(indoc::indoc! {" + struct EvalExampleStruct { + buffer_text: String, + prompt: String, + } + "}), + ); + } + + #[test] + #[cfg_attr(not(feature = "unit-eval"), ignore)] + fn eval_cant_do() { + run_eval( + 20, + 0.95, + "Rename the struct to EvalExampleStructNope", + indoc::indoc! {" + struct EvalExampleStruct { + text: Strˇing, + prompt: String, + } + "}, + uncertain_output, + ); + } + + #[test] + #[cfg_attr(not(feature = "unit-eval"), ignore)] + fn eval_unclear() { + run_eval( + 20, + 0.95, + "Make exactly the change I want you to make", + indoc::indoc! {" + struct EvalExampleStruct { + text: Strˇing, + prompt: String, + } + "}, + uncertain_output, + ); + } + + #[test] + #[cfg_attr(not(feature = "unit-eval"), ignore)] + fn eval_empty_buffer() { + run_eval( + 20, + 1.0, + "Write a Python hello, world program".to_string(), + "ˇ".to_string(), + |output| match output { + InlineAssistantOutput::Success { + full_buffer_text, .. 
+                } => {
+                    if full_buffer_text.is_empty() {
+                        EvalOutput::failed("expected some output".to_string())
+                    } else {
+                        EvalOutput::passed(format!("Produced {full_buffer_text}"))
+                    }
+                }
+                o @ InlineAssistantOutput::Failure { .. } => EvalOutput::failed(format!(
+                    "Assistant output does not match expected output: {:?}",
+                    o
+                )),
+                o @ InlineAssistantOutput::Malformed { .. } => EvalOutput::failed(format!(
+                    "Assistant output does not match expected output: {:?}",
+                    o
+                )),
+            },
+        );
+    }
+
+    fn run_eval(
+        iterations: usize,
+        expected_pass_ratio: f32,
+        prompt: impl Into<String>,
+        buffer: impl Into<String>,
+        judge: impl Fn(InlineAssistantOutput) -> eval_utils::EvalOutput<()> + Send + Sync + 'static,
+    ) {
+        let buffer = buffer.into();
+        let prompt = prompt.into();
+
+        eval_utils::eval(iterations, expected_pass_ratio, NoProcessor, move || {
+            let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng());
+            let mut cx = TestAppContext::build(dispatcher, None);
+            cx.skip_drawing();
+
+            let output = run_inline_assistant_test(
+                buffer.clone(),
+                prompt.clone(),
+                |cx| {
+                    // Reconfigure to use a real model instead of the fake one
+                    let model_name = std::env::var("ZED_AGENT_MODEL")
+                        .unwrap_or("anthropic/claude-sonnet-4-latest".into());
+
+                    let selected_model = SelectedModel::from_str(&model_name)
+                        .expect("Invalid model format. Use 'provider/model-id'");
+
+                    log::info!("Selected model: {selected_model:?}");
+
+                    cx.update(|_, cx| {
+                        LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+                            registry.select_inline_assistant_model(Some(&selected_model), cx);
+                        });
+                    });
+                },
+                |_cx| {
+                    log::info!("Waiting for actual response from the LLM...");
+                },
+                &mut cx,
+            );
+
+            cx.quit();
+
+            judge(output)
+        });
+    }
+
+    fn uncertain_output(output: InlineAssistantOutput) -> EvalOutput<()> {
+        match &output {
+            o @ InlineAssistantOutput::Success {
+                completion,
+                description,
+                ..
+            } => {
+                if description.is_some() && completion.is_none() {
+                    EvalOutput::passed(format!(
+                        "Assistant produced no completion, but a description:\n{}",
+                        description.as_ref().unwrap()
+                    ))
+                } else {
+                    EvalOutput::failed(format!("Assistant produced a completion:\n{:?}", o))
+                }
+            }
+            InlineAssistantOutput::Failure {
+                failure: error_message,
+            } => EvalOutput::passed(format!(
+                "Assistant produced a failure message: {}",
+                error_message
+            )),
+            o @ InlineAssistantOutput::Malformed { .. } => {
+                EvalOutput::failed(format!("Assistant produced a malformed response:\n{:?}", o))
+            }
+        }
+    }
+
+    fn exact_buffer_match(
+        correct_output: impl Into<String>,
+    ) -> impl Fn(InlineAssistantOutput) -> EvalOutput<()> {
+        let correct_output = correct_output.into();
+        move |output| match output {
+            InlineAssistantOutput::Success {
+                description,
+                full_buffer_text,
+                ..
+            } => {
+                if full_buffer_text == correct_output && description.is_none() {
+                    EvalOutput::passed("Assistant output matches")
+                } else if full_buffer_text == correct_output {
+                    EvalOutput::failed(format!(
+                        "Assistant output produced an unnecessary description:\n{:?}",
+                        description
+                    ))
+                } else {
+                    EvalOutput::failed(format!(
+                        "Assistant output does not match expected output:\n{:?}\ndescription:\n{:?}",
+                        full_buffer_text, description
+                    ))
+                }
+            }
+            o @ InlineAssistantOutput::Failure { .. } => EvalOutput::failed(format!(
+                "Assistant output does not match expected output: {:?}",
+                o
+            )),
+            o @ InlineAssistantOutput::Malformed { ..
} => EvalOutput::failed(format!( + "Assistant output does not match expected output: {:?}", + o + )), + } + } +} diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index 2d0538a9172ae69e50b2e4208e540662e7d838b2..8d96d56ea67cc9366df420b23e2221636d3450fb 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -1,19 +1,23 @@ use agent::HistoryStore; use collections::{HashMap, VecDeque}; use editor::actions::Paste; +use editor::code_context_menus::CodeContextMenu; use editor::display_map::{CreaseId, EditorMargins}; -use editor::{Addon, AnchorRangeExt as _}; +use editor::{AnchorRangeExt as _, MultiBufferOffset, ToOffset as _}; use editor::{ ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer, actions::{MoveDown, MoveUp}, }; +use feature_flags::{FeatureFlagAppExt, InlineAssistantUseToolFeatureFlag}; use fs::Fs; use gpui::{ - AnyElement, App, ClipboardEntry, Context, CursorStyle, Entity, EventEmitter, FocusHandle, - Focusable, Subscription, TextStyle, WeakEntity, Window, + AnyElement, App, ClipboardItem, Context, Entity, EventEmitter, FocusHandle, Focusable, + Subscription, TextStyle, TextStyleRefinement, WeakEntity, Window, actions, }; use language_model::{LanguageModel, LanguageModelRegistry}; +use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle}; use parking_lot::Mutex; +use project::Project; use prompt_store::PromptStore; use settings::Settings; use std::cmp; @@ -23,27 +27,41 @@ use std::sync::Arc; use theme::ThemeSettings; use ui::utils::WithRemSize; use ui::{IconButtonShape, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*}; -use workspace::Workspace; +use uuid::Uuid; +use workspace::notifications::NotificationId; +use workspace::{Toast, Workspace}; use zed_actions::agent::ToggleModelSelector; use crate::agent_model_selector::AgentModelSelector; -use crate::buffer_codegen::BufferCodegen; -use crate::context::{AgentContextHandle, AgentContextKey}; -use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention}; -use crate::context_store::{ContextStore, ContextStoreEvent}; -use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind}; -use crate::terminal_codegen::TerminalCodegen; -use crate::{ - CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext, RemoveAllContext, - ToggleContextPicker, +use crate::buffer_codegen::{BufferCodegen, CodegenAlternative}; +use crate::completion_provider::{ + PromptCompletionProvider, PromptCompletionProviderDelegate, PromptContextType, }; +use crate::mention_set::paste_images_as_context; +use crate::mention_set::{MentionSet, crease_for_mention}; +use crate::terminal_codegen::TerminalCodegen; +use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext}; + +actions!(inline_assistant, [ThumbsUpResult, ThumbsDownResult]); + +enum CompletionState { + Pending, + Generated { completion_text: Option }, + Rated, +} + +struct SessionState { + session_id: Uuid, + completion: CompletionState, +} pub struct PromptEditor { pub editor: Entity, mode: PromptEditorMode, - context_store: Entity, - context_strip: Entity, - context_picker_menu_handle: PopoverMenuHandle, + mention_set: Entity, + history_store: Entity, + prompt_store: Option>, + workspace: WeakEntity, model_selector: Entity, edited_since_done: bool, prompt_history: VecDeque, @@ -51,8 +69,8 @@ pub struct PromptEditor { pending_prompt: String, 
_codegen_subscription: Subscription, editor_subscriptions: Vec, - _context_strip_subscription: Subscription, show_rate_limit_notice: bool, + session_state: SessionState, _phantom: std::marker::PhantomData, } @@ -65,7 +83,7 @@ impl Render for PromptEditor { const RIGHT_PADDING: Pixels = px(9.); - let (left_gutter_width, right_padding) = match &self.mode { + let (left_gutter_width, right_padding, explanation) = match &self.mode { PromptEditorMode::Buffer { id: _, codegen, @@ -83,38 +101,67 @@ impl Render for PromptEditor { let left_gutter_width = gutter.full_width() + (gutter.margin / 2.0); let right_padding = editor_margins.right + RIGHT_PADDING; - (left_gutter_width, right_padding) + let active_alternative = codegen.active_alternative().read(cx); + let explanation = active_alternative + .description + .clone() + .or_else(|| active_alternative.failure.clone()); + + (left_gutter_width, right_padding, explanation) } PromptEditorMode::Terminal { .. } => { // Give the equivalent of the same left-padding that we're using on the right - (Pixels::from(40.0), Pixels::from(24.)) + (Pixels::from(40.0), Pixels::from(24.), None) } }; let bottom_padding = match &self.mode { PromptEditorMode::Buffer { .. } => rems_from_px(2.0), - PromptEditorMode::Terminal { .. } => rems_from_px(8.0), + PromptEditorMode::Terminal { .. } => rems_from_px(4.0), }; buttons.extend(self.render_buttons(window, cx)); + let menu_visible = self.is_completions_menu_visible(cx); + let add_context_button = IconButton::new("add-context", IconName::AtSign) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .when(!menu_visible, |this| { + this.tooltip(move |_window, cx| { + Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx) + }) + }) + .on_click(cx.listener(move |this, _, window, cx| { + this.trigger_completion_menu(window, cx); + })); + + let markdown = window.use_state(cx, |_, cx| Markdown::new("".into(), None, None, cx)); + + if let Some(explanation) = &explanation { + markdown.update(cx, |markdown, cx| { + markdown.reset(SharedString::from(explanation), cx); + }); + } + + let explanation_label = self + .render_markdown(markdown, markdown_style(window, cx)) + .into_any_element(); + v_flex() .key_context("PromptEditor") .capture_action(cx.listener(Self::paste)) - .bg(cx.theme().colors().editor_background) .block_mouse_except_scroll() - .gap_0p5() - .border_y_1() - .border_color(cx.theme().status().info_border) .size_full() .pt_0p5() .pb(bottom_padding) .pr(right_padding) + .gap_0p5() + .justify_center() + .border_y_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) .child( h_flex() - .items_start() - .cursor(CursorStyle::Arrow) - .on_action(cx.listener(Self::toggle_context_picker)) .on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| { this.model_selector .update(cx, |model_selector, cx| model_selector.toggle(window, cx)); @@ -123,19 +170,20 @@ impl Render for PromptEditor { .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) .on_action(cx.listener(Self::move_down)) - .on_action(cx.listener(Self::remove_all_context)) + .on_action(cx.listener(Self::thumbs_up)) + .on_action(cx.listener(Self::thumbs_down)) .capture_action(cx.listener(Self::cycle_prev)) .capture_action(cx.listener(Self::cycle_next)) .child( WithRemSize::new(ui_font_size) + .h_full() + .w(left_gutter_width) .flex() .flex_row() .flex_shrink_0() .items_center() - .h_full() - .w(left_gutter_width) .justify_center() - .gap_2() + .gap_1() 
.child(self.render_close_button(cx)) .map(|el| { let CodegenStatus::Error(error) = self.codegen_status(cx) else { @@ -166,26 +214,83 @@ impl Render for PromptEditor { .flex_row() .items_center() .gap_1() + .child(add_context_button) + .child(self.model_selector.clone()) .children(buttons), ), ), ) - .child( - WithRemSize::new(ui_font_size) - .flex() - .flex_row() - .items_center() - .child(h_flex().flex_shrink_0().w(left_gutter_width)) - .child( - h_flex() - .w_full() - .pl_1() - .items_start() - .justify_between() - .child(self.context_strip.clone()) - .child(self.model_selector.clone()), - ), - ) + .when_some(explanation, |this, _| { + this.child( + h_flex() + .size_full() + .justify_center() + .child(div().w(left_gutter_width + px(6.))) + .child( + div() + .size_full() + .min_w_0() + .pt(rems_from_px(3.)) + .pl_0p5() + .flex_1() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child(explanation_label), + ), + ) + }) + } +} + +fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle { + let theme_settings = ThemeSettings::get_global(cx); + let colors = cx.theme().colors(); + let mut text_style = window.text_style(); + + text_style.refine(&TextStyleRefinement { + font_family: Some(theme_settings.ui_font.family.clone()), + color: Some(colors.text), + ..Default::default() + }); + + MarkdownStyle { + base_text_style: text_style.clone(), + syntax: cx.theme().syntax().clone(), + selection_background_color: colors.element_selection_background, + heading_level_styles: Some(HeadingLevelStyles { + h1: Some(TextStyleRefinement { + font_size: Some(rems(1.15).into()), + ..Default::default() + }), + h2: Some(TextStyleRefinement { + font_size: Some(rems(1.1).into()), + ..Default::default() + }), + h3: Some(TextStyleRefinement { + font_size: Some(rems(1.05).into()), + ..Default::default() + }), + h4: Some(TextStyleRefinement { + font_size: Some(rems(1.).into()), + ..Default::default() + }), + h5: Some(TextStyleRefinement { + font_size: Some(rems(0.95).into()), + ..Default::default() + }), + h6: Some(TextStyleRefinement { + font_size: Some(rems(0.875).into()), + ..Default::default() + }), + }), + inline_code: TextStyleRefinement { + font_family: Some(theme_settings.buffer_font.family.clone()), + font_fallbacks: theme_settings.buffer_font.fallbacks.clone(), + font_features: Some(theme_settings.buffer_font.features.clone()), + background_color: Some(colors.editor_foreground.opacity(0.08)), + ..Default::default() + }, + ..Default::default() } } @@ -214,6 +319,19 @@ impl PromptEditor { )); } + fn assign_completion_provider(&mut self, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + editor.set_completion_provider(Some(Rc::new(PromptCompletionProvider::new( + PromptEditorCompletionProviderDelegate, + cx.weak_entity(), + self.mention_set.clone(), + self.history_store.clone(), + self.prompt_store.clone(), + self.workspace.clone(), + )))); + }); + } + pub fn set_show_cursor_when_unfocused( &mut self, show_cursor_when_unfocused: bool, @@ -226,27 +344,40 @@ impl PromptEditor { pub fn unlink(&mut self, window: &mut Window, cx: &mut Context) { let prompt = self.prompt(cx); - let existing_creases = self.editor.update(cx, extract_message_creases); - + let existing_creases = self.editor.update(cx, |editor, cx| { + extract_message_creases(editor, &self.mention_set, window, cx) + }); let focus = self.editor.focus_handle(cx).contains_focused(window, cx); + let mut creases = vec![]; self.editor = cx.new(|cx| { let mut editor = Editor::auto_height(1, Self::MAX_LINES as usize, window, 
cx); editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); editor.set_placeholder_text("Add a prompt…", window, cx); editor.set_text(prompt, window, cx); - insert_message_creases( - &mut editor, - &existing_creases, - &self.context_store, - window, - cx, - ); + creases = insert_message_creases(&mut editor, &existing_creases, window, cx); if focus { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); } editor }); + + self.mention_set.update(cx, |mention_set, _cx| { + debug_assert_eq!( + creases.len(), + mention_set.creases().len(), + "Missing creases" + ); + + let mentions = mention_set + .clear() + .zip(creases) + .map(|((_, value), id)| (id, value)) + .collect::>(); + mention_set.set_mentions(mentions); + }); + + self.assign_completion_provider(cx); self.subscribe_to_editor(window, cx); } @@ -274,43 +405,29 @@ impl PromptEditor { self.editor.read(cx).text(cx) } - fn paste(&mut self, _: &Paste, _window: &mut Window, cx: &mut Context) { - let images = cx - .read_from_clipboard() - .map(|item| { - item.into_entries() - .filter_map(|entry| { - if let ClipboardEntry::Image(image) = entry { - Some(image) - } else { - None - } - }) - .collect::>() - }) - .unwrap_or_default(); - - if images.is_empty() { - return; + fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if inline_assistant_model_supports_images(cx) + && let Some(task) = + paste_images_as_context(self.editor.clone(), self.mention_set.clone(), window, cx) + { + task.detach(); } - cx.stop_propagation(); - - self.context_store.update(cx, |store, cx| { - for image in images { - store.add_image_instance(Arc::new(image), cx); - } - }); } fn handle_prompt_editor_events( &mut self, - _: &Entity, + editor: &Entity, event: &EditorEvent, window: &mut Window, cx: &mut Context, ) { match event { EditorEvent::Edited { .. 
} => { + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); + + self.mention_set + .update(cx, |mention_set, _cx| mention_set.remove_invalid(&snapshot)); + if let Some(workspace) = window.root::().flatten() { workspace.update(cx, |workspace, cx| { let is_via_ssh = workspace.project().read(cx).is_via_remote_server(); @@ -321,7 +438,7 @@ impl PromptEditor { .log_edit_event("inline assist", is_via_ssh); }); } - let prompt = self.editor.read(cx).text(cx); + let prompt = snapshot.text(); if self .prompt_history_ix .is_none_or(|ix| self.prompt_history[ix] != prompt) @@ -331,6 +448,7 @@ impl PromptEditor { } self.edited_since_done = true; + self.session_state.completion = CompletionState::Pending; cx.notify(); } EditorEvent::Blurred => { @@ -343,23 +461,44 @@ impl PromptEditor { } } - fn toggle_context_picker( - &mut self, - _: &ToggleContextPicker, - window: &mut Window, - cx: &mut Context, - ) { - self.context_picker_menu_handle.toggle(window, cx); + pub fn is_completions_menu_visible(&self, cx: &App) -> bool { + self.editor + .read(cx) + .context_menu() + .borrow() + .as_ref() + .is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()) } - pub fn remove_all_context( - &mut self, - _: &RemoveAllContext, - _window: &mut Window, - cx: &mut Context, - ) { - self.context_store.update(cx, |store, cx| store.clear(cx)); - cx.notify(); + pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + let menu_is_open = editor.context_menu().borrow().as_ref().is_some_and(|menu| { + matches!(menu, CodeContextMenu::Completions(_)) && menu.visible() + }); + + let has_at_sign = { + let snapshot = editor.display_snapshot(cx); + let cursor = editor.selections.newest::(&snapshot).head(); + let offset = cursor.to_offset(&snapshot); + if offset.0 > 0 { + snapshot + .buffer_snapshot() + .reversed_chars_at(offset) + .next() + .map(|sign| sign == '@') + .unwrap_or(false) + } else { + false + } + }; + + if menu_is_open && has_at_sign { + return; + } + + editor.insert("@", window, cx); + editor.show_completions(&editor::actions::ShowCompletions, window, cx); + }); } fn cancel( @@ -381,22 +520,207 @@ impl PromptEditor { fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { match self.codegen_status(cx) { CodegenStatus::Idle => { + self.fire_started_telemetry(cx); cx.emit(PromptEditorEvent::StartRequested); } CodegenStatus::Pending => {} CodegenStatus::Done => { if self.edited_since_done { + self.fire_started_telemetry(cx); cx.emit(PromptEditorEvent::StartRequested); } else { cx.emit(PromptEditorEvent::ConfirmRequested { execute: false }); } } CodegenStatus::Error(_) => { + self.fire_started_telemetry(cx); cx.emit(PromptEditorEvent::StartRequested); } } } + fn fire_started_telemetry(&self, cx: &Context) { + let Some(model) = LanguageModelRegistry::read_global(cx).inline_assistant_model() else { + return; + }; + + let model_telemetry_id = model.model.telemetry_id(); + let model_provider_id = model.provider.id().to_string(); + + let (kind, language_name) = match &self.mode { + PromptEditorMode::Buffer { codegen, .. } => { + let codegen = codegen.read(cx); + ( + "inline", + codegen.language_name(cx).map(|name| name.to_string()), + ) + } + PromptEditorMode::Terminal { .. 
} => ("inline_terminal", None), + }; + + telemetry::event!( + "Assistant Started", + session_id = self.session_state.session_id.to_string(), + kind = kind, + phase = "started", + model = model_telemetry_id, + model_provider = model_provider_id, + language_name = language_name, + ); + } + + fn thumbs_up(&mut self, _: &ThumbsUpResult, _window: &mut Window, cx: &mut Context) { + match &self.session_state.completion { + CompletionState::Pending => { + self.toast("Can't rate, still generating...", None, cx); + return; + } + CompletionState::Rated => { + self.toast( + "Already rated this completion", + Some(self.session_state.session_id), + cx, + ); + return; + } + CompletionState::Generated { completion_text } => { + let model_info = self.model_selector.read(cx).active_model(cx); + let (model_id, use_streaming_tools) = { + let Some(configured_model) = model_info else { + self.toast("No configured model", None, cx); + return; + }; + ( + configured_model.model.telemetry_id(), + CodegenAlternative::use_streaming_tools( + configured_model.model.as_ref(), + cx, + ), + ) + }; + + let selected_text = match &self.mode { + PromptEditorMode::Buffer { codegen, .. } => { + codegen.read(cx).selected_text(cx).map(|s| s.to_string()) + } + PromptEditorMode::Terminal { .. } => None, + }; + + let prompt = self.editor.read(cx).text(cx); + + let kind = match &self.mode { + PromptEditorMode::Buffer { .. } => "inline", + PromptEditorMode::Terminal { .. } => "inline_terminal", + }; + + telemetry::event!( + "Inline Assistant Rated", + rating = "positive", + session_id = self.session_state.session_id.to_string(), + kind = kind, + model = model_id, + prompt = prompt, + completion = completion_text, + selected_text = selected_text, + use_streaming_tools + ); + + self.session_state.completion = CompletionState::Rated; + + cx.notify(); + } + } + } + + fn thumbs_down(&mut self, _: &ThumbsDownResult, _window: &mut Window, cx: &mut Context) { + match &self.session_state.completion { + CompletionState::Pending => { + self.toast("Can't rate, still generating...", None, cx); + return; + } + CompletionState::Rated => { + self.toast( + "Already rated this completion", + Some(self.session_state.session_id), + cx, + ); + return; + } + CompletionState::Generated { completion_text } => { + let model_info = self.model_selector.read(cx).active_model(cx); + let (model_telemetry_id, use_streaming_tools) = { + let Some(configured_model) = model_info else { + self.toast("No configured model", None, cx); + return; + }; + ( + configured_model.model.telemetry_id(), + CodegenAlternative::use_streaming_tools( + configured_model.model.as_ref(), + cx, + ), + ) + }; + + let selected_text = match &self.mode { + PromptEditorMode::Buffer { codegen, .. } => { + codegen.read(cx).selected_text(cx).map(|s| s.to_string()) + } + PromptEditorMode::Terminal { .. } => None, + }; + + let prompt = self.editor.read(cx).text(cx); + + let kind = match &self.mode { + PromptEditorMode::Buffer { .. } => "inline", + PromptEditorMode::Terminal { .. 
} => "inline_terminal", + }; + + telemetry::event!( + "Inline Assistant Rated", + rating = "negative", + session_id = self.session_state.session_id.to_string(), + kind = kind, + model = model_telemetry_id, + prompt = prompt, + completion = completion_text, + selected_text = selected_text, + use_streaming_tools + ); + + self.session_state.completion = CompletionState::Rated; + + cx.notify(); + } + } + } + + fn toast(&mut self, msg: &str, uuid: Option, cx: &mut Context<'_, PromptEditor>) { + self.workspace + .update(cx, |workspace, cx| { + enum InlinePromptRating {} + workspace.show_toast( + { + let mut toast = Toast::new( + NotificationId::unique::(), + msg.to_string(), + ) + .autohide(); + + if let Some(uuid) = uuid { + toast = toast.on_click("Click to copy rating ID", move |_, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(uuid.to_string())); + }); + }; + + toast + }, + cx, + ); + }) + .ok(); + } + fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { if let Some(ix) = self.prompt_history_ix { if ix > 0 { @@ -434,8 +758,6 @@ impl PromptEditor { editor.move_to_end(&Default::default(), window, cx) }); } - } else if self.context_strip.read(cx).has_context_items(cx) { - self.context_strip.focus_handle(cx).focus(window); } } @@ -504,6 +826,9 @@ impl PromptEditor { .into_any_element(), ] } else { + let show_rating_buttons = cx.has_flag::(); + let rated = matches!(self.session_state.completion, CompletionState::Rated); + let accept = IconButton::new("accept", IconName::Check) .icon_color(Color::Info) .shape(IconButtonShape::Square) @@ -515,25 +840,92 @@ impl PromptEditor { })) .into_any_element(); - match &self.mode { - PromptEditorMode::Terminal { .. } => vec![ - accept, - IconButton::new("confirm", IconName::PlayFilled) - .icon_color(Color::Info) - .shape(IconButtonShape::Square) - .tooltip(|_window, cx| { - Tooltip::for_action( - "Execute Generated Command", - &menu::SecondaryConfirm, - cx, - ) - }) - .on_click(cx.listener(|_, _, _, cx| { - cx.emit(PromptEditorEvent::ConfirmRequested { execute: true }); - })) + let mut buttons = Vec::new(); + + if show_rating_buttons { + buttons.push( + h_flex() + .pl_1() + .gap_1() + .border_l_1() + .border_color(cx.theme().colors().border_variant) + .child( + IconButton::new("thumbs-up", IconName::ThumbsUp) + .shape(IconButtonShape::Square) + .map(|this| { + if rated { + this.disabled(true) + .icon_color(Color::Ignored) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Good Result", + None, + "You already rated this result", + cx, + ) + }) + } else { + this.icon_color(Color::Muted) + .tooltip(Tooltip::text("Good Result")) + } + }) + .on_click(cx.listener(|this, _, window, cx| { + this.thumbs_up(&ThumbsUpResult, window, cx); + })), + ) + .child( + IconButton::new("thumbs-down", IconName::ThumbsDown) + .shape(IconButtonShape::Square) + .map(|this| { + if rated { + this.disabled(true) + .icon_color(Color::Ignored) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Bad Result", + None, + "You already rated this result", + cx, + ) + }) + } else { + this.icon_color(Color::Muted) + .tooltip(Tooltip::text("Bad Result")) + } + }) + .on_click(cx.listener(|this, _, window, cx| { + this.thumbs_down(&ThumbsDownResult, window, cx); + })), + ) .into_any_element(), - ], - PromptEditorMode::Buffer { .. } => vec![accept], + ); + } + + buttons.push(accept); + + match &self.mode { + PromptEditorMode::Terminal { .. 
} => { + buttons.push( + IconButton::new("confirm", IconName::PlayFilled) + .icon_color(Color::Info) + .shape(IconButtonShape::Square) + .tooltip(|_window, cx| { + Tooltip::for_action( + "Execute Generated Command", + &menu::SecondaryConfirm, + cx, + ) + }) + .on_click(cx.listener(|_, _, _, cx| { + cx.emit(PromptEditorEvent::ConfirmRequested { + execute: true, + }); + })) + .into_any_element(), + ); + buttons + } + PromptEditorMode::Buffer { .. } => buttons, } } } @@ -568,10 +960,21 @@ impl PromptEditor { } fn render_close_button(&self, cx: &mut Context) -> AnyElement { + let focus_handle = self.editor.focus_handle(cx); + IconButton::new("cancel", IconName::Close) .icon_color(Color::Muted) .shape(IconButtonShape::Square) - .tooltip(Tooltip::text("Close Assistant")) + .tooltip({ + move |_window, cx| { + Tooltip::for_action_in( + "Close Assistant", + &editor::actions::Cancel, + &focus_handle, + cx, + ) + } + }) .on_click(cx.listener(|_, _, _, cx| cx.emit(PromptEditorEvent::CancelRequested))) .into_any_element() } @@ -709,6 +1112,7 @@ impl PromptEditor { EditorStyle { background: colors.editor_background, local_player: cx.theme().players().local(), + syntax: cx.theme().syntax().clone(), text: text_style, ..Default::default() }, @@ -717,19 +1121,8 @@ impl PromptEditor { .into_any_element() } - fn handle_context_strip_event( - &mut self, - _context_strip: &Entity, - event: &ContextStripEvent, - window: &mut Window, - cx: &mut Context, - ) { - match event { - ContextStripEvent::PickerDismissed - | ContextStripEvent::BlurredEmpty - | ContextStripEvent::BlurredUp => self.editor.focus_handle(cx).focus(window), - ContextStripEvent::BlurredDown => {} - } + fn render_markdown(&self, markdown: Entity, style: MarkdownStyle) -> MarkdownElement { + MarkdownElement::new(markdown, style) } } @@ -765,6 +1158,36 @@ impl InlineAssistId { } } +struct PromptEditorCompletionProviderDelegate; + +fn inline_assistant_model_supports_images(cx: &App) -> bool { + LanguageModelRegistry::read_global(cx) + .inline_assistant_model() + .map_or(false, |m| m.model.supports_images()) +} + +impl PromptCompletionProviderDelegate for PromptEditorCompletionProviderDelegate { + fn supported_modes(&self, _cx: &App) -> Vec { + vec![ + PromptContextType::File, + PromptContextType::Symbol, + PromptContextType::Thread, + PromptContextType::Fetch, + PromptContextType::Rules, + ] + } + + fn supports_images(&self, cx: &App) -> bool { + inline_assistant_model_supports_images(cx) + } + + fn available_commands(&self, _cx: &App) -> Vec { + Vec::new() + } + + fn confirm_command(&self, _cx: &mut App) {} +} + impl PromptEditor { pub fn new_buffer( id: InlineAssistId, @@ -772,16 +1195,16 @@ impl PromptEditor { prompt_history: VecDeque, prompt_buffer: Entity, codegen: Entity, + session_id: Uuid, fs: Arc, - context_store: Entity, + history_store: Entity, + prompt_store: Option>, + project: WeakEntity, workspace: WeakEntity, - thread_store: Option>, - prompt_store: Option>, window: &mut Window, cx: &mut Context>, ) -> PromptEditor { let codegen_subscription = cx.observe(&codegen, Self::handle_codegen_changed); - let codegen_buffer = codegen.read(cx).buffer(cx).read(cx).as_singleton(); let mode = PromptEditorMode::Buffer { id, codegen, @@ -805,7 +1228,6 @@ impl PromptEditor { // typing in one will make what you typed appear in all of them. 
editor.set_show_cursor_when_unfocused(true, cx); editor.set_placeholder_text(&Self::placeholder_text(&mode, window, cx), window, cx); - editor.register_addon(ContextCreasesAddon::new()); editor.set_context_menu_options(ContextMenuOptions { min_entries_visible: 12, max_entries_visible: 12, @@ -815,43 +1237,17 @@ impl PromptEditor { editor }); - let prompt_editor_entity = prompt_editor.downgrade(); - prompt_editor.update(cx, |editor, _| { - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - workspace.clone(), - context_store.downgrade(), - thread_store.clone(), - prompt_store.clone(), - prompt_editor_entity, - codegen_buffer.as_ref().map(Entity::downgrade), - )))); - }); + let mention_set = + cx.new(|_cx| MentionSet::new(project, history_store.clone(), prompt_store.clone())); - let context_picker_menu_handle = PopoverMenuHandle::default(); let model_selector_menu_handle = PopoverMenuHandle::default(); - let context_strip = cx.new(|cx| { - ContextStrip::new( - context_store.clone(), - workspace.clone(), - thread_store.clone(), - prompt_store, - context_picker_menu_handle.clone(), - SuggestContextKind::Thread, - ModelUsageContext::InlineAssistant, - window, - cx, - ) - }); - - let context_strip_subscription = - cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event); - let mut this: PromptEditor = PromptEditor { editor: prompt_editor.clone(), - context_store, - context_strip, - context_picker_menu_handle, + mention_set, + history_store, + prompt_store, + workspace, model_selector: cx.new(|cx| { AgentModelSelector::new( fs, @@ -868,19 +1264,23 @@ impl PromptEditor { pending_prompt: String::new(), _codegen_subscription: codegen_subscription, editor_subscriptions: Vec::new(), - _context_strip_subscription: context_strip_subscription, show_rate_limit_notice: false, mode, + session_state: SessionState { + session_id, + completion: CompletionState::Pending, + }, _phantom: Default::default(), }; + this.assign_completion_provider(cx); this.subscribe_to_editor(window, cx); this } fn handle_codegen_changed( &mut self, - _: Entity, + codegen: Entity, cx: &mut Context>, ) { match self.codegen_status(cx) { @@ -889,10 +1289,15 @@ impl PromptEditor { .update(cx, |editor, _| editor.set_read_only(false)); } CodegenStatus::Pending => { + self.session_state.completion = CompletionState::Pending; self.editor .update(cx, |editor, _| editor.set_read_only(true)); } CodegenStatus::Done => { + let completion = codegen.read(cx).active_completion(cx); + self.session_state.completion = CompletionState::Generated { + completion_text: completion, + }; self.edited_since_done = false; self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -919,6 +1324,10 @@ impl PromptEditor { } } + pub fn mention_set(&self) -> &Entity { + &self.mention_set + } + pub fn editor_margins(&self) -> &Arc> { match &self.mode { PromptEditorMode::Buffer { editor_margins, .. 
} => editor_margins, @@ -944,11 +1353,12 @@ impl PromptEditor { prompt_history: VecDeque, prompt_buffer: Entity, codegen: Entity, + session_id: Uuid, fs: Arc, - context_store: Entity, + history_store: Entity, + prompt_store: Option>, + project: WeakEntity, workspace: WeakEntity, - thread_store: Option>, - prompt_store: Option>, window: &mut Window, cx: &mut Context, ) -> Self { @@ -980,43 +1390,17 @@ impl PromptEditor { editor }); - let prompt_editor_entity = prompt_editor.downgrade(); - prompt_editor.update(cx, |editor, _| { - editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new( - workspace.clone(), - context_store.downgrade(), - thread_store.clone(), - prompt_store.clone(), - prompt_editor_entity, - None, - )))); - }); + let mention_set = + cx.new(|_cx| MentionSet::new(project, history_store.clone(), prompt_store.clone())); - let context_picker_menu_handle = PopoverMenuHandle::default(); let model_selector_menu_handle = PopoverMenuHandle::default(); - let context_strip = cx.new(|cx| { - ContextStrip::new( - context_store.clone(), - workspace.clone(), - thread_store.clone(), - prompt_store.clone(), - context_picker_menu_handle.clone(), - SuggestContextKind::Thread, - ModelUsageContext::InlineAssistant, - window, - cx, - ) - }); - - let context_strip_subscription = - cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event); - let mut this = Self { editor: prompt_editor.clone(), - context_store, - context_strip, - context_picker_menu_handle, + mention_set, + history_store, + prompt_store, + workspace, model_selector: cx.new(|cx| { AgentModelSelector::new( fs, @@ -1033,12 +1417,16 @@ impl PromptEditor { pending_prompt: String::new(), _codegen_subscription: codegen_subscription, editor_subscriptions: Vec::new(), - _context_strip_subscription: context_strip_subscription, mode, show_rate_limit_notice: false, + session_state: SessionState { + session_id, + completion: CompletionState::Pending, + }, _phantom: Default::default(), }; this.count_lines(cx); + this.assign_completion_provider(cx); this.subscribe_to_editor(window, cx); this } @@ -1067,17 +1455,21 @@ impl PromptEditor { } } - fn handle_codegen_changed(&mut self, _: Entity, cx: &mut Context) { + fn handle_codegen_changed(&mut self, codegen: Entity, cx: &mut Context) { match &self.codegen().read(cx).status { CodegenStatus::Idle => { self.editor .update(cx, |editor, _| editor.set_read_only(false)); } CodegenStatus::Pending => { + self.session_state.completion = CompletionState::Pending; self.editor .update(cx, |editor, _| editor.set_read_only(true)); } CodegenStatus::Done | CodegenStatus::Error(_) => { + self.session_state.completion = CompletionState::Generated { + completion_text: codegen.read(cx).completion(), + }; self.edited_since_done = false; self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -1085,6 +1477,10 @@ impl PromptEditor { } } + pub fn mention_set(&self) -> &Entity { + &self.mention_set + } + pub fn codegen(&self) -> &Entity { match &self.mode { PromptEditorMode::Buffer { .. } => unreachable!(), @@ -1164,131 +1560,41 @@ impl GenerationMode { /// Stored information that can be used to resurrect a context crease when creating an editor for a past message. #[derive(Clone, Debug)] -pub struct MessageCrease { - pub range: Range, - pub icon_path: SharedString, - pub label: SharedString, - /// None for a deserialized message, Some otherwise. 
- pub context: Option, +struct MessageCrease { + range: Range, + icon_path: SharedString, + label: SharedString, } -#[derive(Default)] -pub struct ContextCreasesAddon { - creases: HashMap>, - _subscription: Option, -} - -impl Addon for ContextCreasesAddon { - fn to_any(&self) -> &dyn std::any::Any { - self - } - - fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> { - Some(self) - } -} - -impl ContextCreasesAddon { - pub fn new() -> Self { - Self { - creases: HashMap::default(), - _subscription: None, - } - } - - pub fn add_creases( - &mut self, - context_store: &Entity, - key: AgentContextKey, - creases: impl IntoIterator, - cx: &mut Context, - ) { - self.creases.entry(key).or_default().extend(creases); - self._subscription = Some( - cx.subscribe(context_store, |editor, _, event, cx| match event { - ContextStoreEvent::ContextRemoved(key) => { - let Some(this) = editor.addon_mut::() else { - return; - }; - let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this - .creases - .remove(key) - .unwrap_or_default() - .into_iter() - .unzip(); - let ranges = editor - .remove_creases(crease_ids, cx) - .into_iter() - .map(|(_, range)| range) - .collect::>(); - editor.unfold_ranges(&ranges, false, false, cx); - editor.edit(ranges.into_iter().zip(replacement_texts), cx); - cx.notify(); - } - }), - ) - } - - pub fn into_inner(self) -> HashMap> { - self.creases - } -} - -pub fn extract_message_creases( +fn extract_message_creases( editor: &mut Editor, + mention_set: &Entity, + window: &mut Window, cx: &mut Context<'_, Editor>, ) -> Vec { - let buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let mut contexts_by_crease_id = editor - .addon_mut::() - .map(std::mem::take) - .unwrap_or_default() - .into_inner() - .into_iter() - .flat_map(|(key, creases)| { - let context = key.0; - creases - .into_iter() - .map(move |(id, _)| (id, context.clone())) - }) - .collect::>(); - // Filter the addon's list of creases based on what the editor reports, - // since the addon might have removed creases in it. 
- - editor.display_map.update(cx, |display_map, cx| { - display_map - .snapshot(cx) - .crease_snapshot - .creases() - .filter_map(|(id, crease)| { - Some(( - id, - ( - crease.range().to_offset(&buffer_snapshot), - crease.metadata()?.clone(), - ), - )) - }) - .map(|(id, (range, metadata))| { - let context = contexts_by_crease_id.remove(&id); - MessageCrease { - range, - context, - label: metadata.label, - icon_path: metadata.icon_path, - } + let creases = mention_set.read(cx).creases(); + let snapshot = editor.snapshot(window, cx); + snapshot + .crease_snapshot + .creases() + .filter(|(id, _)| creases.contains(id)) + .filter_map(|(_, crease)| { + let metadata = crease.metadata()?.clone(); + Some(MessageCrease { + range: crease.range().to_offset(snapshot.buffer()), + label: metadata.label, + icon_path: metadata.icon_path, }) - .collect() - }) + }) + .collect() } -pub fn insert_message_creases( +fn insert_message_creases( editor: &mut Editor, message_creases: &[MessageCrease], - context_store: &Entity, window: &mut Window, cx: &mut Context<'_, Editor>, -) { +) -> Vec { let buffer_snapshot = editor.buffer().read(cx).snapshot(cx); let creases = message_creases .iter() @@ -1305,12 +1611,5 @@ pub fn insert_message_creases( .collect::>(); let ids = editor.insert_creases(creases.clone(), cx); editor.fold_creases(creases, false, window, cx); - if let Some(addon) = editor.addon_mut::() { - for (crease, id) in message_creases.iter().zip(ids) { - if let Some(context) = crease.context.as_ref() { - let key = AgentContextKey(context.clone()); - addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx); - } - } - } + ids } diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index 0f7b83e3edba6c8d97c2c12a939a65cb71c39dca..77c8c95255908dc54639ad7ac6c55f1e8b8151f0 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ b/crates/agent_ui/src/language_model_selector.rs @@ -1,32 +1,44 @@ use std::{cmp::Reverse, sync::Arc}; -use collections::{HashSet, IndexMap}; +use agent_settings::AgentSettings; +use collections::{HashMap, HashSet, IndexMap}; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; -use gpui::{Action, AnyElement, App, BackgroundExecutor, DismissEvent, Subscription, Task}; +use gpui::{ + Action, AnyElement, App, BackgroundExecutor, DismissEvent, FocusHandle, Subscription, Task, +}; use language_model::{ - AuthenticateError, ConfiguredModel, LanguageModel, LanguageModelProviderId, - LanguageModelRegistry, + AuthenticateError, ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProvider, + LanguageModelProviderId, LanguageModelRegistry, }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; -use ui::{ListItem, ListItemSpacing, prelude::*}; +use settings::Settings; +use ui::prelude::*; +use zed_actions::agent::OpenSettings; + +use crate::ui::{ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem}; type OnModelChanged = Arc, &mut App) + 'static>; type GetActiveModel = Arc Option + 'static>; +type OnToggleFavorite = Arc, bool, &App) + 'static>; pub type LanguageModelSelector = Picker; pub fn language_model_selector( get_active_model: impl Fn(&App) -> Option + 'static, on_model_changed: impl Fn(Arc, &mut App) + 'static, + on_toggle_favorite: impl Fn(Arc, bool, &App) + 'static, popover_styles: bool, + focus_handle: FocusHandle, window: &mut Window, cx: &mut Context, ) -> LanguageModelSelector { let delegate = LanguageModelPickerDelegate::new( get_active_model, 
on_model_changed, + on_toggle_favorite, popover_styles, + focus_handle, window, cx, ); @@ -42,7 +54,17 @@ pub fn language_model_selector( } fn all_models(cx: &App) -> GroupedModels { - let providers = LanguageModelRegistry::global(cx).read(cx).providers(); + let lm_registry = LanguageModelRegistry::global(cx).read(cx); + let providers = lm_registry.providers(); + + let mut favorites_index = FavoritesIndex::default(); + + for sel in &AgentSettings::get_global(cx).favorite_models { + favorites_index + .entry(sel.provider.0.clone().into()) + .or_default() + .insert(sel.model.clone().into()); + } let recommended = providers .iter() @@ -50,51 +72,70 @@ fn all_models(cx: &App) -> GroupedModels { provider .recommended_models(cx) .into_iter() - .map(|model| ModelInfo { - model, - icon: provider.icon(), - }) + .map(|model| ModelInfo::new(&**provider, model, &favorites_index)) }) .collect(); - let other = providers + let all = providers .iter() .flat_map(|provider| { provider .provided_models(cx) .into_iter() - .map(|model| ModelInfo { - model, - icon: provider.icon(), - }) + .map(|model| ModelInfo::new(&**provider, model, &favorites_index)) }) .collect(); - GroupedModels::new(other, recommended) + GroupedModels::new(all, recommended) } +type FavoritesIndex = HashMap>; + #[derive(Clone)] struct ModelInfo { model: Arc, icon: IconName, + is_favorite: bool, +} + +impl ModelInfo { + fn new( + provider: &dyn LanguageModelProvider, + model: Arc, + favorites_index: &FavoritesIndex, + ) -> Self { + let is_favorite = favorites_index + .get(&provider.id()) + .map_or(false, |set| set.contains(&model.id())); + + Self { + model, + icon: provider.icon(), + is_favorite, + } + } } pub struct LanguageModelPickerDelegate { on_model_changed: OnModelChanged, get_active_model: GetActiveModel, + on_toggle_favorite: OnToggleFavorite, all_models: Arc, filtered_entries: Vec, selected_index: usize, _authenticate_all_providers_task: Task<()>, _subscriptions: Vec, popover_styles: bool, + focus_handle: FocusHandle, } impl LanguageModelPickerDelegate { fn new( get_active_model: impl Fn(&App) -> Option + 'static, on_model_changed: impl Fn(Arc, &mut App) + 'static, + on_toggle_favorite: impl Fn(Arc, bool, &App) + 'static, popover_styles: bool, + focus_handle: FocusHandle, window: &mut Window, cx: &mut Context>, ) -> Self { @@ -108,6 +149,7 @@ impl LanguageModelPickerDelegate { selected_index: Self::get_active_model_index(&entries, get_active_model(cx)), filtered_entries: entries, get_active_model: Arc::new(get_active_model), + on_toggle_favorite: Arc::new(on_toggle_favorite), _authenticate_all_providers_task: Self::authenticate_all_providers(cx), _subscriptions: vec![cx.subscribe_in( &LanguageModelRegistry::global(cx), @@ -128,6 +170,7 @@ impl LanguageModelPickerDelegate { }, )], popover_styles, + focus_handle, } } @@ -206,80 +249,104 @@ impl LanguageModelPickerDelegate { pub fn active_model(&self, cx: &App) -> Option { (self.get_active_model)(cx) } + + pub fn cycle_favorite_models(&mut self, window: &mut Window, cx: &mut Context>) { + if self.all_models.favorites.is_empty() { + return; + } + + let active_model = (self.get_active_model)(cx); + let active_provider_id = active_model.as_ref().map(|m| m.provider.id()); + let active_model_id = active_model.as_ref().map(|m| m.model.id()); + + let current_index = self + .all_models + .favorites + .iter() + .position(|info| { + Some(info.model.provider_id()) == active_provider_id + && Some(info.model.id()) == active_model_id + }) + .unwrap_or(usize::MAX); + + let next_index = if 
current_index == usize::MAX { + 0 + } else { + (current_index + 1) % self.all_models.favorites.len() + }; + + let next_model = self.all_models.favorites[next_index].model.clone(); + + (self.on_model_changed)(next_model, cx); + + // Align the picker selection with the newly-active model + let new_index = + Self::get_active_model_index(&self.filtered_entries, (self.get_active_model)(cx)); + self.set_selected_index(new_index, window, cx); + } } struct GroupedModels { + favorites: Vec, recommended: Vec, - other: IndexMap>, + all: IndexMap>, } impl GroupedModels { - pub fn new(other: Vec, recommended: Vec) -> Self { - let recommended_ids = recommended + pub fn new(all: Vec, recommended: Vec) -> Self { + let favorites = all .iter() - .map(|info| (info.model.provider_id(), info.model.id())) - .collect::>(); - - let mut other_by_provider: IndexMap<_, Vec> = IndexMap::default(); - for model in other { - if recommended_ids.contains(&(model.model.provider_id(), model.model.id())) { - continue; - } + .filter(|info| info.is_favorite) + .cloned() + .collect(); + let mut all_by_provider: IndexMap<_, Vec> = IndexMap::default(); + for model in all { let provider = model.model.provider_id(); - if let Some(models) = other_by_provider.get_mut(&provider) { + if let Some(models) = all_by_provider.get_mut(&provider) { models.push(model); } else { - other_by_provider.insert(provider, vec![model]); + all_by_provider.insert(provider, vec![model]); } } Self { + favorites, recommended, - other: other_by_provider, + all: all_by_provider, } } fn entries(&self) -> Vec { let mut entries = Vec::new(); + if !self.favorites.is_empty() { + entries.push(LanguageModelPickerEntry::Separator("Favorite".into())); + for info in &self.favorites { + entries.push(LanguageModelPickerEntry::Model(info.clone())); + } + } + if !self.recommended.is_empty() { entries.push(LanguageModelPickerEntry::Separator("Recommended".into())); - entries.extend( - self.recommended - .iter() - .map(|info| LanguageModelPickerEntry::Model(info.clone())), - ); + for info in &self.recommended { + entries.push(LanguageModelPickerEntry::Model(info.clone())); + } } - for models in self.other.values() { + for models in self.all.values() { if models.is_empty() { continue; } entries.push(LanguageModelPickerEntry::Separator( models[0].model.provider_name().0, )); - entries.extend( - models - .iter() - .map(|info| LanguageModelPickerEntry::Model(info.clone())), - ); + for info in models { + entries.push(LanguageModelPickerEntry::Model(info.clone())); + } } - entries - } - fn model_infos(&self) -> Vec { - let other = self - .other - .values() - .flat_map(|model| model.iter()) - .cloned() - .collect::>(); - self.recommended - .iter() - .chain(&other) - .cloned() - .collect::>() + entries } } @@ -425,8 +492,9 @@ impl PickerDelegate for LanguageModelPickerDelegate { .collect::>(); let available_models = all_models - .model_infos() - .iter() + .all + .values() + .flat_map(|models| models.iter()) .filter(|m| configured_provider_ids.contains(&m.model.provider_id())) .cloned() .collect::>(); @@ -478,23 +546,9 @@ impl PickerDelegate for LanguageModelPickerDelegate { cx: &mut Context>, ) -> Option { match self.filtered_entries.get(ix)? 
{ - LanguageModelPickerEntry::Separator(title) => Some( - div() - .px_2() - .pb_1() - .when(ix > 1, |this| { - this.mt_1() - .pt_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - }) - .child( - Label::new(title) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - .into_any_element(), - ), + LanguageModelPickerEntry::Separator(title) => { + Some(ModelSelectorHeader::new(title, ix > 1).into_any_element()) + } LanguageModelPickerEntry::Model(model_info) => { let active_model = (self.get_active_model)(cx); let active_provider_id = active_model.as_ref().map(|m| m.provider.id()); @@ -503,37 +557,20 @@ impl PickerDelegate for LanguageModelPickerDelegate { let is_selected = Some(model_info.model.provider_id()) == active_provider_id && Some(model_info.model.id()) == active_model_id; - let model_icon_color = if is_selected { - Color::Accent - } else { - Color::Muted + let is_favorite = model_info.is_favorite; + let handle_action_click = { + let model = model_info.model.clone(); + let on_toggle_favorite = self.on_toggle_favorite.clone(); + move |cx: &App| on_toggle_favorite(model.clone(), !is_favorite, cx) }; Some( - ListItem::new(ix) - .inset(true) - .spacing(ListItemSpacing::Sparse) - .toggle_state(selected) - .start_slot( - Icon::new(model_info.icon) - .color(model_icon_color) - .size(IconSize::Small), - ) - .child( - h_flex() - .w_full() - .pl_0p5() - .gap_1p5() - .w(px(240.)) - .child(Label::new(model_info.model.name().0).truncate()), - ) - .end_slot(div().pr_3().when(is_selected, |this| { - this.child( - Icon::new(IconName::Check) - .color(Color::Accent) - .size(IconSize::Small), - ) - })) + ModelSelectorListItem::new(ix, model_info.model.name().0) + .icon(model_info.icon) + .is_selected(is_selected) + .is_focused(selected) + .is_favorite(is_favorite) + .on_toggle_favorite(handle_action_click) .into_any_element(), ) } @@ -543,35 +580,15 @@ impl PickerDelegate for LanguageModelPickerDelegate { fn render_footer( &self, _window: &mut Window, - cx: &mut Context>, + _cx: &mut Context>, ) -> Option { + let focus_handle = self.focus_handle.clone(); + if !self.popover_styles { return None; } - Some( - h_flex() - .w_full() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .p_1() - .gap_4() - .justify_between() - .child( - Button::new("configure", "Configure") - .icon(IconName::Settings) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) - .on_click(|_, window, cx| { - window.dispatch_action( - zed_actions::agent::OpenSettings.boxed_clone(), - cx, - ); - }), - ) - .into_any(), - ) + Some(ModelSelectorFooter::new(OpenSettings.boxed_clone(), focus_handle).into_any_element()) } } @@ -670,11 +687,24 @@ mod tests { } fn create_models(model_specs: Vec<(&str, &str)>) -> Vec { + create_models_with_favorites(model_specs, vec![]) + } + + fn create_models_with_favorites( + model_specs: Vec<(&str, &str)>, + favorites: Vec<(&str, &str)>, + ) -> Vec { model_specs .into_iter() - .map(|(provider, name)| ModelInfo { - model: Arc::new(TestLanguageModel::new(name, provider)), - icon: IconName::Ai, + .map(|(provider, name)| { + let is_favorite = favorites + .iter() + .any(|(fav_provider, fav_name)| *fav_provider == provider && *fav_name == name); + ModelInfo { + model: Arc::new(TestLanguageModel::new(name, provider)), + icon: IconName::Ai, + is_favorite, + } }) .collect() } @@ -764,46 +794,141 @@ mod tests { } #[gpui::test] - fn test_exclude_recommended_models(_cx: &mut TestAppContext) { + fn 
test_recommended_models_also_appear_in_other(_cx: &mut TestAppContext) { let recommended_models = create_models(vec![("zed", "claude")]); let all_models = create_models(vec![ - ("zed", "claude"), // Should be filtered out from "other" + ("zed", "claude"), // Should also appear in "other" ("zed", "gemini"), ("copilot", "o3"), ]); let grouped_models = GroupedModels::new(all_models, recommended_models); - let actual_other_models = grouped_models - .other + let actual_all_models = grouped_models + .all .values() .flatten() .cloned() .collect::>(); - // Recommended models should not appear in "other" - assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/o3"]); + // Recommended models should also appear in "all" + assert_models_eq( + actual_all_models, + vec!["zed/claude", "zed/gemini", "copilot/o3"], + ); } #[gpui::test] - fn test_dont_exclude_models_from_other_providers(_cx: &mut TestAppContext) { + fn test_models_from_different_providers(_cx: &mut TestAppContext) { let recommended_models = create_models(vec![("zed", "claude")]); let all_models = create_models(vec![ - ("zed", "claude"), // Should be filtered out from "other" + ("zed", "claude"), // Should also appear in "other" ("zed", "gemini"), - ("copilot", "claude"), // Should not be filtered out from "other" + ("copilot", "claude"), // Different provider, should appear in "other" ]); let grouped_models = GroupedModels::new(all_models, recommended_models); - let actual_other_models = grouped_models - .other + let actual_all_models = grouped_models + .all .values() .flatten() .cloned() .collect::>(); - // Recommended models should not appear in "other" - assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/claude"]); + // All models should appear in "all" regardless of recommended status + assert_models_eq( + actual_all_models, + vec!["zed/claude", "zed/gemini", "copilot/claude"], + ); + } + + #[gpui::test] + fn test_favorites_section_appears_when_favorites_exist(_cx: &mut TestAppContext) { + let recommended_models = create_models(vec![("zed", "claude")]); + let all_models = create_models_with_favorites( + vec![("zed", "claude"), ("zed", "gemini"), ("openai", "gpt-4")], + vec![("zed", "gemini")], + ); + + let grouped_models = GroupedModels::new(all_models, recommended_models); + let entries = grouped_models.entries(); + + assert!(matches!( + entries.first(), + Some(LanguageModelPickerEntry::Separator(s)) if s == "Favorite" + )); + + assert_models_eq(grouped_models.favorites, vec!["zed/gemini"]); + } + + #[gpui::test] + fn test_no_favorites_section_when_no_favorites(_cx: &mut TestAppContext) { + let recommended_models = create_models(vec![("zed", "claude")]); + let all_models = create_models(vec![("zed", "claude"), ("zed", "gemini")]); + + let grouped_models = GroupedModels::new(all_models, recommended_models); + let entries = grouped_models.entries(); + + assert!(matches!( + entries.first(), + Some(LanguageModelPickerEntry::Separator(s)) if s == "Recommended" + )); + + assert!(grouped_models.favorites.is_empty()); + } + + #[gpui::test] + fn test_models_have_correct_actions(_cx: &mut TestAppContext) { + let recommended_models = + create_models_with_favorites(vec![("zed", "claude")], vec![("zed", "claude")]); + let all_models = create_models_with_favorites( + vec![("zed", "claude"), ("zed", "gemini"), ("openai", "gpt-4")], + vec![("zed", "claude")], + ); + + let grouped_models = GroupedModels::new(all_models, recommended_models); + let entries = grouped_models.entries(); + + for entry in &entries { + if let 
LanguageModelPickerEntry::Model(info) = entry { + if info.model.telemetry_id() == "zed/claude" { + assert!(info.is_favorite, "zed/claude should be a favorite"); + } else { + assert!( + !info.is_favorite, + "{} should not be a favorite", + info.model.telemetry_id() + ); + } + } + } + } + + #[gpui::test] + fn test_favorites_appear_in_other_sections(_cx: &mut TestAppContext) { + let favorites = vec![("zed", "gemini"), ("openai", "gpt-4")]; + + let recommended_models = + create_models_with_favorites(vec![("zed", "claude")], favorites.clone()); + + let all_models = create_models_with_favorites( + vec![ + ("zed", "claude"), + ("zed", "gemini"), + ("openai", "gpt-4"), + ("openai", "gpt-3.5"), + ], + favorites, + ); + + let grouped_models = GroupedModels::new(all_models, recommended_models); + + assert_models_eq(grouped_models.favorites, vec!["zed/gemini", "openai/gpt-4"]); + assert_models_eq(grouped_models.recommended, vec!["zed/claude"]); + assert_models_eq( + grouped_models.all.values().flatten().cloned().collect(), + vec!["zed/claude", "zed/gemini", "openai/gpt-4", "openai/gpt-3.5"], + ); } } diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs new file mode 100644 index 0000000000000000000000000000000000000000..eee28bbfb2d36ce8f41e64cafd2e8f24b504f97f --- /dev/null +++ b/crates/agent_ui/src/mention_set.rs @@ -0,0 +1,1098 @@ +use acp_thread::{MentionUri, selection_name}; +use agent::{HistoryStore, outline}; +use agent_client_protocol as acp; +use agent_servers::{AgentServer, AgentServerDelegate}; +use anyhow::{Context as _, Result, anyhow}; +use assistant_slash_commands::codeblock_fence_for_path; +use collections::{HashMap, HashSet}; +use editor::{ + Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset, + display_map::{Crease, CreaseId, CreaseMetadata, FoldId}, + scroll::Autoscroll, +}; +use futures::{AsyncReadExt as _, FutureExt as _, future::Shared}; +use gpui::{ + Animation, AnimationExt as _, AppContext, ClipboardEntry, Context, Empty, Entity, EntityId, + Image, ImageFormat, Img, SharedString, Task, WeakEntity, pulsating_between, +}; +use http_client::{AsyncBody, HttpClientWithUrl}; +use itertools::Either; +use language::Buffer; +use language_model::LanguageModelImage; +use multi_buffer::MultiBufferRow; +use postage::stream::Stream as _; +use project::{Project, ProjectItem, ProjectPath, Worktree}; +use prompt_store::{PromptId, PromptStore}; +use rope::Point; +use std::{ + cell::RefCell, + ffi::OsStr, + fmt::Write, + ops::{Range, RangeInclusive}, + path::{Path, PathBuf}, + rc::Rc, + sync::Arc, + time::Duration, +}; +use text::OffsetRangeExt; +use ui::{ButtonLike, Disclosure, TintColor, Toggleable, prelude::*}; +use util::{ResultExt, debug_panic, rel_path::RelPath}; +use workspace::{Workspace, notifications::NotifyResultExt as _}; + +pub type MentionTask = Shared>>; + +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Mention { + Text { + content: String, + tracked_buffers: Vec>, + }, + Image(MentionImage), + Link, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct MentionImage { + pub data: SharedString, + pub format: ImageFormat, +} + +pub struct MentionSet { + project: WeakEntity, + history_store: Entity, + prompt_store: Option>, + mentions: HashMap, +} + +impl MentionSet { + pub fn new( + project: WeakEntity, + history_store: Entity, + prompt_store: Option>, + ) -> Self { + Self { + project, + history_store, + prompt_store, + mentions: HashMap::default(), + } + } + + pub fn contents( + &self, + full_mention_content: bool, + cx: &mut 
App, + ) -> Task>> { + let Some(project) = self.project.upgrade() else { + return Task::ready(Err(anyhow!("Project not found"))); + }; + let mentions = self.mentions.clone(); + cx.spawn(async move |cx| { + let mut contents = HashMap::default(); + for (crease_id, (mention_uri, task)) in mentions { + let content = if full_mention_content + && let MentionUri::Directory { abs_path } = &mention_uri + { + cx.update(|cx| full_mention_for_directory(&project, abs_path, cx))? + .await? + } else { + task.await.map_err(|e| anyhow!("{e}"))? + }; + + contents.insert(crease_id, (mention_uri, content)); + } + Ok(contents) + }) + } + + pub fn remove_invalid(&mut self, snapshot: &EditorSnapshot) { + for (crease_id, crease) in snapshot.crease_snapshot.creases() { + if !crease.range().start.is_valid(snapshot.buffer_snapshot()) { + self.mentions.remove(&crease_id); + } + } + } + + pub fn insert_mention(&mut self, crease_id: CreaseId, uri: MentionUri, task: MentionTask) { + self.mentions.insert(crease_id, (uri, task)); + } + + pub fn remove_mention(&mut self, crease_id: &CreaseId) { + self.mentions.remove(crease_id); + } + + pub fn creases(&self) -> HashSet { + self.mentions.keys().cloned().collect() + } + + pub fn mentions(&self) -> HashSet { + self.mentions.values().map(|(uri, _)| uri.clone()).collect() + } + + pub fn set_mentions(&mut self, mentions: HashMap) { + self.mentions = mentions; + } + + pub fn clear(&mut self) -> impl Iterator { + self.mentions.drain() + } + + pub fn confirm_mention_completion( + &mut self, + crease_text: SharedString, + start: text::Anchor, + content_len: usize, + mention_uri: MentionUri, + supports_images: bool, + editor: Entity, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let Some(project) = self.project.upgrade() else { + return Task::ready(()); + }; + + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); + let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else { + return Task::ready(()); + }; + let excerpt_id = start_anchor.excerpt_id; + let end_anchor = snapshot.buffer_snapshot().anchor_before( + start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize, + ); + + let crease = if let MentionUri::File { abs_path } = &mention_uri + && let Some(extension) = abs_path.extension() + && let Some(extension) = extension.to_str() + && Img::extensions().contains(&extension) + && !extension.contains("svg") + { + let Some(project_path) = project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + log::error!("project path not found"); + return Task::ready(()); + }; + let image_task = project.update(cx, |project, cx| project.open_image(project_path, cx)); + let image = cx + .spawn(async move |_, cx| { + let image = image_task.await.map_err(|e| e.to_string())?; + let image = image + .update(cx, |image, _| image.image.clone()) + .map_err(|e| e.to_string())?; + Ok(image) + }) + .shared(); + insert_crease_for_mention( + excerpt_id, + start, + content_len, + mention_uri.name().into(), + IconName::Image.path().into(), + Some(image), + editor.clone(), + window, + cx, + ) + } else { + insert_crease_for_mention( + excerpt_id, + start, + content_len, + crease_text, + mention_uri.icon_path(cx), + None, + editor.clone(), + window, + cx, + ) + }; + let Some((crease_id, tx)) = crease else { + return Task::ready(()); + }; + + let task = match mention_uri.clone() { + MentionUri::Fetch { url } => { + self.confirm_mention_for_fetch(url, workspace.read(cx).client().http_client(), 
cx)
+            }
+            MentionUri::Directory { .. } => Task::ready(Ok(Mention::Link)),
+            MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx),
+            MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx),
+            MentionUri::File { abs_path } => {
+                self.confirm_mention_for_file(abs_path, supports_images, cx)
+            }
+            MentionUri::Symbol {
+                abs_path,
+                line_range,
+                ..
+            } => self.confirm_mention_for_symbol(abs_path, line_range, cx),
+            MentionUri::Rule { id, .. } => self.confirm_mention_for_rule(id, cx),
+            MentionUri::PastedImage => {
+                debug_panic!("pasted image URI should not be included in completions");
+                Task::ready(Err(anyhow!(
+                    "pasted image URI should not be included in completions"
+                )))
+            }
+            MentionUri::Selection { .. } => {
+                debug_panic!("unexpected selection URI");
+                Task::ready(Err(anyhow!("unexpected selection URI")))
+            }
+        };
+        let task = cx
+            .spawn(async move |_, _| task.await.map_err(|e| e.to_string()))
+            .shared();
+        self.mentions.insert(crease_id, (mention_uri, task.clone()));
+
+        // Notify the user if we failed to load the mentioned context
+        cx.spawn_in(window, async move |this, cx| {
+            let result = task.await.notify_async_err(cx);
+            drop(tx);
+            if result.is_none() {
+                this.update(cx, |this, cx| {
+                    editor.update(cx, |editor, cx| {
+                        // Remove mention
+                        editor.edit([(start_anchor..end_anchor, "")], cx);
+                    });
+                    this.mentions.remove(&crease_id);
+                })
+                .ok();
+            }
+        })
+    }
+
+    pub fn confirm_mention_for_file(
+        &self,
+        abs_path: PathBuf,
+        supports_images: bool,
+        cx: &mut Context,
+    ) -> Task> {
+        let Some(project) = self.project.upgrade() else {
+            return Task::ready(Err(anyhow!("project not found")));
+        };
+
+        let Some(project_path) = project
+            .read(cx)
+            .project_path_for_absolute_path(&abs_path, cx)
+        else {
+            return Task::ready(Err(anyhow!("project path not found")));
+        };
+        let extension = abs_path
+            .extension()
+            .and_then(OsStr::to_str)
+            .unwrap_or_default();
+
+        if Img::extensions().contains(&extension) && !extension.contains("svg") {
+            if !supports_images {
+                return Task::ready(Err(anyhow!("This model does not support images yet")));
+            }
+            let task = project.update(cx, |project, cx| project.open_image(project_path, cx));
+            return cx.spawn(async move |_, cx| {
+                let image = task.await?;
+                let image = image.update(cx, |image, _| image.image.clone())?;
+                let format = image.format;
+                let image = cx
+                    .update(|cx| LanguageModelImage::from_image(image, cx))?
+ .await; + if let Some(image) = image { + Ok(Mention::Image(MentionImage { + data: image.source, + format, + })) + } else { + Err(anyhow!("Failed to convert image")) + } + }); + } + + let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); + cx.spawn(async move |_, cx| { + let buffer = buffer.await?; + let buffer_content = outline::get_buffer_content_or_outline( + buffer.clone(), + Some(&abs_path.to_string_lossy()), + &cx, + ) + .await?; + + Ok(Mention::Text { + content: buffer_content.text, + tracked_buffers: vec![buffer], + }) + }) + } + + fn confirm_mention_for_fetch( + &self, + url: url::Url, + http_client: Arc, + cx: &mut Context, + ) -> Task> { + cx.background_executor().spawn(async move { + let content = fetch_url_content(http_client, url.to_string()).await?; + Ok(Mention::Text { + content, + tracked_buffers: Vec::new(), + }) + }) + } + + fn confirm_mention_for_symbol( + &self, + abs_path: PathBuf, + line_range: RangeInclusive, + cx: &mut Context, + ) -> Task> { + let Some(project) = self.project.upgrade() else { + return Task::ready(Err(anyhow!("project not found"))); + }; + let Some(project_path) = project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + return Task::ready(Err(anyhow!("project path not found"))); + }; + let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); + cx.spawn(async move |_, cx| { + let buffer = buffer.await?; + let mention = buffer.update(cx, |buffer, cx| { + let start = Point::new(*line_range.start(), 0).min(buffer.max_point()); + let end = Point::new(*line_range.end() + 1, 0).min(buffer.max_point()); + let content = buffer.text_for_range(start..end).collect(); + Mention::Text { + content, + tracked_buffers: vec![cx.entity()], + } + })?; + anyhow::Ok(mention) + }) + } + + fn confirm_mention_for_rule( + &mut self, + id: PromptId, + cx: &mut Context, + ) -> Task> { + let Some(prompt_store) = self.prompt_store.as_ref() else { + return Task::ready(Err(anyhow!("Missing prompt store"))); + }; + let prompt = prompt_store.read(cx).load(id, cx); + cx.spawn(async move |_, _| { + let prompt = prompt.await?; + Ok(Mention::Text { + content: prompt, + tracked_buffers: Vec::new(), + }) + }) + } + + pub fn confirm_mention_for_selection( + &mut self, + source_range: Range, + selections: Vec<(Entity, Range, Range)>, + editor: Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(project) = self.project.upgrade() else { + return; + }; + + let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); + let Some(start) = snapshot.as_singleton_anchor(source_range.start) else { + return; + }; + + let offset = start.to_offset(&snapshot); + + for (buffer, selection_range, range_to_fold) in selections { + let range = snapshot.anchor_after(offset + range_to_fold.start) + ..snapshot.anchor_after(offset + range_to_fold.end); + + let abs_path = buffer + .read(cx) + .project_path(cx) + .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)); + let snapshot = buffer.read(cx).snapshot(); + + let text = snapshot + .text_for_range(selection_range.clone()) + .collect::(); + let point_range = selection_range.to_point(&snapshot); + let line_range = point_range.start.row..=point_range.end.row; + + let uri = MentionUri::Selection { + abs_path: abs_path.clone(), + line_range: line_range.clone(), + }; + let crease = crease_for_mention( + selection_name(abs_path.as_deref(), &line_range).into(), + uri.icon_path(cx), + range, + editor.downgrade(), + ); + + let crease_id = 
editor.update(cx, |editor, cx| {
+                let crease_ids = editor.insert_creases(vec![crease.clone()], cx);
+                editor.fold_creases(vec![crease], false, window, cx);
+                crease_ids.first().copied().unwrap()
+            });
+
+            self.mentions.insert(
+                crease_id,
+                (
+                    uri,
+                    Task::ready(Ok(Mention::Text {
+                        content: text,
+                        tracked_buffers: vec![buffer],
+                    }))
+                    .shared(),
+                ),
+            );
+        }
+
+        // Take this explanation with a grain of salt but, with creases being
+        // inserted, GPUI recomputes the editor layout in the next frames, so
+        // directly calling `editor.request_autoscroll` wouldn't work as
+        // expected. We're leveraging `cx.on_next_frame` to wait 2 frames and
+        // ensure that the layout has been recalculated so that the autoscroll
+        // request actually shows the cursor's new position.
+        cx.on_next_frame(window, move |_, window, cx| {
+            cx.on_next_frame(window, move |_, _, cx| {
+                editor.update(cx, |editor, cx| {
+                    editor.request_autoscroll(Autoscroll::fit(), cx)
+                });
+            });
+        });
+    }
+
+    fn confirm_mention_for_thread(
+        &mut self,
+        id: acp::SessionId,
+        cx: &mut Context,
+    ) -> Task> {
+        let Some(project) = self.project.upgrade() else {
+            return Task::ready(Err(anyhow!("project not found")));
+        };
+
+        let server = Rc::new(agent::NativeAgentServer::new(
+            project.read(cx).fs().clone(),
+            self.history_store.clone(),
+        ));
+        let delegate = AgentServerDelegate::new(
+            project.read(cx).agent_server_store().clone(),
+            project.clone(),
+            None,
+            None,
+        );
+        let connection = server.connect(None, delegate, cx);
+        cx.spawn(async move |_, cx| {
+            let (agent, _) = connection.await?;
+            let agent = agent.downcast::().unwrap();
+            let summary = agent
+                .0
+                .update(cx, |agent, cx| agent.thread_summary(id, cx))?
+                .await?;
+            anyhow::Ok(Mention::Text {
+                content: summary.to_string(),
+                tracked_buffers: Vec::new(),
+            })
+        })
+    }
+
+    fn confirm_mention_for_text_thread(
+        &mut self,
+        path: PathBuf,
+        cx: &mut Context,
+    ) -> Task> {
+        let text_thread_task = self.history_store.update(cx, |store, cx| {
+            store.load_text_thread(path.as_path().into(), cx)
+        });
+        cx.spawn(async move |_, cx| {
+            let text_thread = text_thread_task.await?;
+            let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx))?;
+            Ok(Mention::Text {
+                content: xml,
+                tracked_buffers: Vec::new(),
+            })
+        })
+    }
+}
+
+pub(crate) fn paste_images_as_context(
+    editor: Entity,
+    mention_set: Entity,
+    window: &mut Window,
+    cx: &mut App,
+) -> Option> {
+    let clipboard = cx.read_from_clipboard()?;
+    Some(window.spawn(cx, async move |cx| {
+        use itertools::Itertools;
+        let (mut images, paths) = clipboard
+            .into_entries()
+            .filter_map(|entry| match entry {
+                ClipboardEntry::Image(image) => Some(Either::Left(image)),
+                ClipboardEntry::ExternalPaths(paths) => Some(Either::Right(paths)),
+                _ => None,
+            })
+            .partition_map::, Vec<_>, _, _, _>(std::convert::identity);
+
+        if !paths.is_empty() {
+            images.extend(
+                cx.background_spawn(async move {
+                    let mut images = vec![];
+                    for path in paths.into_iter().flat_map(|paths| paths.paths().to_owned()) {
+                        let Ok(content) = async_fs::read(path).await else {
+                            continue;
+                        };
+                        let Ok(format) = image::guess_format(&content) else {
+                            continue;
+                        };
+                        images.push(gpui::Image::from_bytes(
+                            match format {
+                                image::ImageFormat::Png => gpui::ImageFormat::Png,
+                                image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
+                                image::ImageFormat::WebP => gpui::ImageFormat::Webp,
+                                image::ImageFormat::Gif => gpui::ImageFormat::Gif,
+                                image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
+                                image::ImageFormat::Tiff =>
gpui::ImageFormat::Tiff, + image::ImageFormat::Ico => gpui::ImageFormat::Ico, + _ => continue, + }, + content, + )); + } + images + }) + .await, + ); + } + + if images.is_empty() { + return; + } + + let replacement_text = MentionUri::PastedImage.as_link().to_string(); + cx.update(|_window, cx| { + cx.stop_propagation(); + }) + .ok(); + for image in images { + let Ok((excerpt_id, text_anchor, multibuffer_anchor)) = + editor.update_in(cx, |message_editor, window, cx| { + let snapshot = message_editor.snapshot(window, cx); + let (excerpt_id, _, buffer_snapshot) = + snapshot.buffer_snapshot().as_singleton().unwrap(); + + let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len()); + let multibuffer_anchor = snapshot + .buffer_snapshot() + .anchor_in_excerpt(*excerpt_id, text_anchor); + message_editor.edit( + [( + multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + format!("{replacement_text} "), + )], + cx, + ); + (*excerpt_id, text_anchor, multibuffer_anchor) + }) + else { + break; + }; + + let content_len = replacement_text.len(); + let Some(start_anchor) = multibuffer_anchor else { + continue; + }; + let Ok(end_anchor) = editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len) + }) else { + continue; + }; + let image = Arc::new(image); + let Ok(Some((crease_id, tx))) = cx.update(|window, cx| { + insert_crease_for_mention( + excerpt_id, + text_anchor, + content_len, + MentionUri::PastedImage.name().into(), + IconName::Image.path().into(), + Some(Task::ready(Ok(image.clone())).shared()), + editor.clone(), + window, + cx, + ) + }) else { + continue; + }; + let task = cx + .spawn(async move |cx| { + let format = image.format; + let image = cx + .update(|_, cx| LanguageModelImage::from_image(image, cx)) + .map_err(|e| e.to_string())? 
+ .await; + drop(tx); + if let Some(image) = image { + Ok(Mention::Image(MentionImage { + data: image.source, + format, + })) + } else { + Err("Failed to convert image".into()) + } + }) + .shared(); + + mention_set + .update(cx, |mention_set, _cx| { + mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone()) + }) + .ok(); + + if task.await.notify_async_err(cx).is_none() { + editor + .update(cx, |editor, cx| { + editor.edit([(start_anchor..end_anchor, "")], cx); + }) + .ok(); + mention_set + .update(cx, |mention_set, _cx| { + mention_set.remove_mention(&crease_id) + }) + .ok(); + } + } + })) +} + +pub(crate) fn insert_crease_for_mention( + excerpt_id: ExcerptId, + anchor: text::Anchor, + content_len: usize, + crease_label: SharedString, + crease_icon: SharedString, + // abs_path: Option>, + image: Option, String>>>>, + editor: Entity, + window: &mut Window, + cx: &mut App, +) -> Option<(CreaseId, postage::barrier::Sender)> { + let (tx, rx) = postage::barrier::channel(); + + let crease_id = editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + + let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; + + let start = start.bias_right(&snapshot); + let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); + + let placeholder = FoldPlaceholder { + render: render_mention_fold_button( + crease_label.clone(), + crease_icon.clone(), + start..end, + rx, + image, + cx.weak_entity(), + cx, + ), + merge_adjacent: false, + ..Default::default() + }; + + let crease = Crease::Inline { + range: start..end, + placeholder, + render_toggle: None, + render_trailer: None, + metadata: Some(CreaseMetadata { + label: crease_label, + icon_path: crease_icon, + }), + }; + + let ids = editor.insert_creases(vec![crease.clone()], cx); + editor.fold_creases(vec![crease], false, window, cx); + + Some(ids[0]) + })?; + + Some((crease_id, tx)) +} + +pub(crate) fn crease_for_mention( + label: SharedString, + icon_path: SharedString, + range: Range, + editor_entity: WeakEntity, +) -> Crease { + let placeholder = FoldPlaceholder { + render: render_fold_icon_button(icon_path.clone(), label.clone(), editor_entity), + merge_adjacent: false, + ..Default::default() + }; + + let render_trailer = move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any(); + + Crease::inline(range, placeholder, fold_toggle("mention"), render_trailer) + .with_metadata(CreaseMetadata { icon_path, label }) +} + +fn render_fold_icon_button( + icon_path: SharedString, + label: SharedString, + editor: WeakEntity, +) -> Arc, &mut App) -> AnyElement> { + Arc::new({ + move |fold_id, fold_range, cx| { + let is_in_text_selection = editor + .update(cx, |editor, cx| editor.is_range_selected(&fold_range, cx)) + .unwrap_or_default(); + + ButtonLike::new(fold_id) + .style(ButtonStyle::Filled) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(is_in_text_selection) + .child( + h_flex() + .gap_1() + .child( + Icon::from_path(icon_path.clone()) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(label.clone()) + .size(LabelSize::Small) + .buffer_font(cx) + .single_line(), + ), + ) + .into_any_element() + } + }) +} + +fn fold_toggle( + name: &'static str, +) -> impl Fn( + MultiBufferRow, + bool, + Arc, + &mut Window, + &mut App, +) -> AnyElement { + move |row, is_folded, fold, _window, _cx| { + Disclosure::new((name, row.0 as u64), !is_folded) + .toggle_state(is_folded) + .on_click(move |_e, window, cx| fold(!is_folded, window, cx)) 
+ .into_any_element() + } +} + +fn full_mention_for_directory( + project: &Entity, + abs_path: &Path, + cx: &mut App, +) -> Task> { + fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc, String)> { + let mut files = Vec::new(); + + for entry in worktree.child_entries(path) { + if entry.is_dir() { + files.extend(collect_files_in_path(worktree, &entry.path)); + } else if entry.is_file() { + files.push(( + entry.path.clone(), + worktree + .full_path(&entry.path) + .to_string_lossy() + .to_string(), + )); + } + } + + files + } + + let Some(project_path) = project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + return Task::ready(Err(anyhow!("project path not found"))); + }; + let Some(entry) = project.read(cx).entry_for_path(&project_path, cx) else { + return Task::ready(Err(anyhow!("project entry not found"))); + }; + let directory_path = entry.path.clone(); + let worktree_id = project_path.worktree_id; + let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) else { + return Task::ready(Err(anyhow!("worktree not found"))); + }; + let project = project.clone(); + cx.spawn(async move |cx| { + let file_paths = worktree.read_with(cx, |worktree, _cx| { + collect_files_in_path(worktree, &directory_path) + })?; + let descendants_future = cx.update(|cx| { + futures::future::join_all(file_paths.into_iter().map(|(worktree_path, full_path)| { + let rel_path = worktree_path + .strip_prefix(&directory_path) + .log_err() + .map_or_else(|| worktree_path.clone(), |rel_path| rel_path.into()); + + let open_task = project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + let project_path = ProjectPath { + worktree_id, + path: worktree_path, + }; + buffer_store.open_buffer(project_path, cx) + }) + }); + + cx.spawn(async move |cx| { + let buffer = open_task.await.log_err()?; + let buffer_content = outline::get_buffer_content_or_outline( + buffer.clone(), + Some(&full_path), + &cx, + ) + .await + .ok()?; + + Some((rel_path, full_path, buffer_content.text, buffer)) + }) + })) + })?; + + let contents = cx + .background_spawn(async move { + let (contents, tracked_buffers) = descendants_future + .await + .into_iter() + .flatten() + .map(|(rel_path, full_path, rope, buffer)| { + ((rel_path, full_path, rope), buffer) + }) + .unzip(); + Mention::Text { + content: render_directory_contents(contents), + tracked_buffers, + } + }) + .await; + anyhow::Ok(contents) + }) +} + +fn render_directory_contents(entries: Vec<(Arc, String, String)>) -> String { + let mut output = String::new(); + for (_relative_path, full_path, content) in entries { + let fence = codeblock_fence_for_path(Some(&full_path), None); + write!(output, "\n{fence}\n{content}\n```").unwrap(); + } + output +} + +fn render_mention_fold_button( + label: SharedString, + icon: SharedString, + range: Range, + mut loading_finished: postage::barrier::Receiver, + image_task: Option, String>>>>, + editor: WeakEntity, + cx: &mut App, +) -> Arc, &mut App) -> AnyElement> { + let loading = cx.new(|cx| { + let loading = cx.spawn(async move |this, cx| { + loading_finished.recv().await; + this.update(cx, |this: &mut LoadingContext, cx| { + this.loading = None; + cx.notify(); + }) + .ok(); + }); + LoadingContext { + id: cx.entity_id(), + label, + icon, + range, + editor, + loading: Some(loading), + image: image_task.clone(), + } + }); + Arc::new(move |_fold_id, _fold_range, _cx| loading.clone().into_any_element()) +} + +struct LoadingContext { + id: EntityId, + label: SharedString, 
+ icon: SharedString, + range: Range, + editor: WeakEntity, + loading: Option>, + image: Option, String>>>>, +} + +impl Render for LoadingContext { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_in_text_selection = self + .editor + .update(cx, |editor, cx| editor.is_range_selected(&self.range, cx)) + .unwrap_or_default(); + ButtonLike::new(("loading-context", self.id)) + .style(ButtonStyle::Filled) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(is_in_text_selection) + .when_some(self.image.clone(), |el, image_task| { + el.hoverable_tooltip(move |_, cx| { + let image = image_task.peek().cloned().transpose().ok().flatten(); + let image_task = image_task.clone(); + cx.new::(|cx| ImageHover { + image, + _task: cx.spawn(async move |this, cx| { + if let Ok(image) = image_task.clone().await { + this.update(cx, |this, cx| { + if this.image.replace(image).is_none() { + cx.notify(); + } + }) + .ok(); + } + }), + }) + .into() + }) + }) + .child( + h_flex() + .gap_1() + .child( + Icon::from_path(self.icon.clone()) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(self.label.clone()) + .size(LabelSize::Small) + .buffer_font(cx) + .single_line(), + ) + .map(|el| { + if self.loading.is_some() { + el.with_animation( + "loading-context-crease", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.opacity(delta), + ) + .into_any() + } else { + el.into_any() + } + }), + ) + } +} + +struct ImageHover { + image: Option>, + _task: Task<()>, +} + +impl Render for ImageHover { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + if let Some(image) = self.image.clone() { + gpui::img(image).max_w_96().max_h_96().into_any_element() + } else { + gpui::Empty.into_any_element() + } + } +} + +async fn fetch_url_content(http_client: Arc, url: String) -> Result { + #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] + enum ContentType { + Html, + Plaintext, + Json, + } + use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown}; + + let url = if !url.starts_with("https://") && !url.starts_with("http://") { + format!("https://{url}") + } else { + url + }; + + let mut response = http_client.get(&url, AsyncBody::default(), true).await?; + let mut body = Vec::new(); + response + .body_mut() + .read_to_end(&mut body) + .await + .context("error reading response body")?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + anyhow::bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let Some(content_type) = response.headers().get("content-type") else { + anyhow::bail!("missing Content-Type header"); + }; + let content_type = content_type + .to_str() + .context("invalid Content-Type header")?; + let content_type = match content_type { + "text/html" => ContentType::Html, + "text/plain" => ContentType::Plaintext, + "application/json" => ContentType::Json, + _ => ContentType::Html, + }; + + match content_type { + ContentType::Html => { + let mut handlers: Vec = vec![ + Rc::new(RefCell::new(markdown::WebpageChromeRemover)), + Rc::new(RefCell::new(markdown::ParagraphHandler)), + Rc::new(RefCell::new(markdown::HeadingHandler)), + Rc::new(RefCell::new(markdown::ListHandler)), + Rc::new(RefCell::new(markdown::TableHandler::new())), + Rc::new(RefCell::new(markdown::StyledTextHandler)), + ]; + if url.contains("wikipedia.org") 
{ + use html_to_markdown::structure::wikipedia; + + handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover))); + handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler))); + handlers.push(Rc::new( + RefCell::new(wikipedia::WikipediaCodeHandler::new()), + )); + } else { + handlers.push(Rc::new(RefCell::new(markdown::CodeHandler))); + } + convert_html_to_markdown(&body[..], &mut handlers) + } + ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()), + ContentType::Json => { + let json: serde_json::Value = serde_json::from_slice(&body)?; + + Ok(format!( + "```json\n{}\n```", + serde_json::to_string_pretty(&json)? + )) + } + } +} diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index c1949d22e268e8744db7834a58d1a3303fa4e236..ac08070fcefa92854b51bc8a66d4d388d08e087d 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -1,4 +1,4 @@ -use crate::{ManageProfiles, ToggleProfileSelector}; +use crate::{CycleModeSelector, ManageProfiles, ToggleProfileSelector}; use agent_settings::{ AgentProfile, AgentProfileId, AgentSettings, AvailableProfiles, builtin_profiles, }; @@ -70,6 +70,29 @@ impl ProfileSelector { self.picker_handle.clone() } + pub fn cycle_profile(&mut self, cx: &mut Context) { + if !self.provider.profiles_supported(cx) { + return; + } + + let profiles = AgentProfile::available_profiles(cx); + if profiles.is_empty() { + return; + } + + let current_profile_id = self.provider.profile_id(cx); + let current_index = profiles + .keys() + .position(|id| id == ¤t_profile_id) + .unwrap_or(0); + + let next_index = (current_index + 1) % profiles.len(); + + if let Some((next_profile_id, _)) = profiles.get_index(next_index) { + self.provider.set_profile(next_profile_id.clone(), cx); + } + } + fn ensure_picker( &mut self, window: &mut Window, @@ -163,14 +186,29 @@ impl Render for ProfileSelector { PickerPopoverMenu::new( picker, trigger_button, - move |_window, cx| { - Tooltip::for_action_in( - "Toggle Profile Menu", - &ToggleProfileSelector, - &focus_handle, - cx, - ) - }, + Tooltip::element({ + move |_window, cx| { + let container = || h_flex().gap_1().justify_between(); + v_flex() + .gap_1() + .child( + container() + .pb_1() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(Label::new("Cycle Through Profiles")) + .child(KeyBinding::for_action_in( + &CycleModeSelector, + &focus_handle, + cx, + )), + ) + .child(container().child(Label::new("Toggle Profile Menu")).child( + KeyBinding::for_action_in(&ToggleProfileSelector, &focus_handle, cx), + )) + .into_any() + } + }), gpui::Corner::BottomRight, cx, ) @@ -542,7 +580,7 @@ impl PickerDelegate for ProfilePickerDelegate { let is_active = active_id == candidate.id; Some( - ListItem::new(SharedString::from(candidate.id.0.clone())) + ListItem::new(candidate.id.0.clone()) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) diff --git a/crates/agent_ui/src/slash_command.rs b/crates/agent_ui/src/slash_command.rs index c2f26c4f2ed33860196790746dd296e8c617b810..e328ef6725e5e789bd402667da91417ad69a372d 100644 --- a/crates/agent_ui/src/slash_command.rs +++ b/crates/agent_ui/src/slash_command.rs @@ -127,6 +127,8 @@ impl SlashCommandCompletionProvider { new_text, label: command.label(cx), icon_path: None, + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm, source: CompletionSource::Custom, @@ -232,6 +234,8 @@ impl SlashCommandCompletionProvider { 
icon_path: None, new_text, documentation: None, + match_start: None, + snippet_deduplication_key: None, confirm, insert_text_mode: None, source: CompletionSource::Custom, @@ -337,7 +341,6 @@ impl CompletionProvider for SlashCommandCompletionProvider { position: language::Anchor, _text: &str, _trigger_in_words: bool, - _menu_is_open: bool, cx: &mut Context, ) -> bool { let buffer = buffer.read(cx); diff --git a/crates/agent_ui/src/terminal_codegen.rs b/crates/agent_ui/src/terminal_codegen.rs index 5a4a9d560a16e858dcaedf706f2067a24bc12c5f..e93d3d3991378ddb4156b264be1f0a5ab4d4faac 100644 --- a/crates/agent_ui/src/terminal_codegen.rs +++ b/crates/agent_ui/src/terminal_codegen.rs @@ -1,37 +1,38 @@ use crate::inline_prompt_editor::CodegenStatus; -use client::telemetry::Telemetry; use futures::{SinkExt, StreamExt, channel::mpsc}; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task}; -use language_model::{ - ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, report_assistant_event, -}; -use std::{sync::Arc, time::Instant}; -use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; +use language_model::{ConfiguredModel, LanguageModelRegistry, LanguageModelRequest}; +use std::time::Instant; use terminal::Terminal; +use uuid::Uuid; pub struct TerminalCodegen { pub status: CodegenStatus, - pub telemetry: Option>, terminal: Entity, generation: Task<()>, pub message_id: Option, transaction: Option, + session_id: Uuid, } impl EventEmitter for TerminalCodegen {} impl TerminalCodegen { - pub fn new(terminal: Entity, telemetry: Option>) -> Self { + pub fn new(terminal: Entity, session_id: Uuid) -> Self { Self { terminal, - telemetry, status: CodegenStatus::Idle, generation: Task::ready(()), message_id: None, transaction: None, + session_id, } } + pub fn session_id(&self) -> Uuid { + self.session_id + } + pub fn start(&mut self, prompt_task: Task, cx: &mut Context) { let Some(ConfiguredModel { model, .. 
}) = LanguageModelRegistry::read_global(cx).inline_assistant_model() @@ -39,15 +40,15 @@ impl TerminalCodegen { return; }; - let model_api_key = model.api_key(cx); - let http_client = cx.http_client(); - let telemetry = self.telemetry.clone(); + let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx); + let session_id = self.session_id; + let model_telemetry_id = model.telemetry_id(); + let model_provider_id = model.provider_id().to_string(); + self.status = CodegenStatus::Pending; self.transaction = Some(TerminalTransaction::start(self.terminal.clone())); self.generation = cx.spawn(async move |this, cx| { let prompt = prompt_task.await; - let model_telemetry_id = model.telemetry_id(); - let model_provider_id = model.provider_id(); let response = model.stream_completion_text(prompt, cx).await; let generate = async { let message_id = response @@ -59,7 +60,7 @@ impl TerminalCodegen { let task = cx.background_spawn({ let message_id = message_id.clone(); - let executor = cx.background_executor().clone(); + let anthropic_reporter = anthropic_reporter.clone(); async move { let mut response_latency = None; let request_start = Instant::now(); @@ -79,24 +80,27 @@ impl TerminalCodegen { let result = task.await; let error_message = result.as_ref().err().map(|error| error.to_string()); - report_assistant_event( - AssistantEventData { - conversation_id: None, - kind: AssistantKind::InlineTerminal, - message_id, - phase: AssistantPhase::Response, - model: model_telemetry_id, - model_provider: model_provider_id.to_string(), - response_latency, - error_message, - language_name: None, - }, - telemetry, - http_client, - model_api_key, - &executor, + + telemetry::event!( + "Assistant Responded", + session_id = session_id.to_string(), + kind = "inline_terminal", + phase = "response", + model = model_telemetry_id, + model_provider = model_provider_id, + language_name = Option::<&str>::None, + message_id = message_id, + response_latency = response_latency, + error_message = error_message, ); + anthropic_reporter.report(language_model::AnthropicEventData { + completion_type: language_model::AnthropicCompletionType::Terminal, + event: language_model::AnthropicEventType::Response, + language_name: None, + message_id, + }); + result?; anyhow::Ok(()) } @@ -135,6 +139,12 @@ impl TerminalCodegen { cx.notify(); } + pub fn completion(&self) -> Option { + self.transaction + .as_ref() + .map(|transaction| transaction.completion.clone()) + } + pub fn stop(&mut self, cx: &mut Context) { self.status = CodegenStatus::Done; self.generation = Task::ready(()); @@ -167,27 +177,32 @@ pub const CLEAR_INPUT: &str = "\x03"; const CARRIAGE_RETURN: &str = "\x0d"; struct TerminalTransaction { + completion: String, terminal: Entity, } impl TerminalTransaction { pub fn start(terminal: Entity) -> Self { - Self { terminal } + Self { + completion: String::new(), + terminal, + } } pub fn push(&mut self, hunk: String, cx: &mut App) { // Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal let input = Self::sanitize_input(hunk); + self.completion.push_str(&input); self.terminal .update(cx, |terminal, _| terminal.input(input.into_bytes())); } - pub fn undo(&self, cx: &mut App) { + pub fn undo(self, cx: &mut App) { self.terminal .update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes())); } - pub fn complete(&self, cx: &mut App) { + pub fn complete(self, cx: &mut App) { self.terminal .update(cx, |terminal, _| terminal.input(CARRIAGE_RETURN.as_bytes())); } diff --git 
a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index 9e653dcce1dcf1487af9998662b57ea4f998c7de..cacbc316bb84e74e5c369451791f777a9bf58e82 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -1,6 +1,5 @@ use crate::{ context::load_context, - context_store::ContextStore, inline_prompt_editor::{ CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId, }, @@ -9,7 +8,7 @@ use crate::{ use agent::HistoryStore; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; -use client::telemetry::Telemetry; + use cloud_llm_client::CompletionIntent; use collections::{HashMap, VecDeque}; use editor::{MultiBuffer, actions::SelectAll}; @@ -18,24 +17,19 @@ use gpui::{App, Entity, Focusable, Global, Subscription, Task, UpdateGlobal, Wea use language::Buffer; use language_model::{ ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - Role, report_assistant_event, + Role, report_anthropic_event, }; use project::Project; use prompt_store::{PromptBuilder, PromptStore}; use std::sync::Arc; -use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; use terminal_view::TerminalView; use ui::prelude::*; use util::ResultExt; +use uuid::Uuid; use workspace::{Toast, Workspace, notifications::NotificationId}; -pub fn init( - fs: Arc, - prompt_builder: Arc, - telemetry: Arc, - cx: &mut App, -) { - cx.set_global(TerminalInlineAssistant::new(fs, prompt_builder, telemetry)); +pub fn init(fs: Arc, prompt_builder: Arc, cx: &mut App) { + cx.set_global(TerminalInlineAssistant::new(fs, prompt_builder)); } const DEFAULT_CONTEXT_LINES: usize = 50; @@ -45,7 +39,6 @@ pub struct TerminalInlineAssistant { next_assist_id: TerminalInlineAssistId, assists: HashMap, prompt_history: VecDeque, - telemetry: Option>, fs: Arc, prompt_builder: Arc, } @@ -53,16 +46,11 @@ pub struct TerminalInlineAssistant { impl Global for TerminalInlineAssistant {} impl TerminalInlineAssistant { - pub fn new( - fs: Arc, - prompt_builder: Arc, - telemetry: Arc, - ) -> Self { + pub fn new(fs: Arc, prompt_builder: Arc) -> Self { Self { next_assist_id: TerminalInlineAssistId::default(), assists: HashMap::default(), prompt_history: VecDeque::default(), - telemetry: Some(telemetry), fs, prompt_builder, } @@ -73,22 +61,22 @@ impl TerminalInlineAssistant { terminal_view: &Entity, workspace: WeakEntity, project: WeakEntity, + thread_store: Entity, prompt_store: Option>, - thread_store: Option>, initial_prompt: Option, window: &mut Window, cx: &mut App, ) { let terminal = terminal_view.read(cx).terminal().clone(); let assist_id = self.next_assist_id.post_inc(); + let session_id = Uuid::new_v4(); let prompt_buffer = cx.new(|cx| { MultiBuffer::singleton( cx.new(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx)), cx, ) }); - let context_store = cx.new(|_cx| ContextStore::new(project)); - let codegen = cx.new(|_| TerminalCodegen::new(terminal, self.telemetry.clone())); + let codegen = cx.new(|_| TerminalCodegen::new(terminal, session_id)); let prompt_editor = cx.new(|cx| { PromptEditor::new_terminal( @@ -96,11 +84,12 @@ impl TerminalInlineAssistant { self.prompt_history.clone(), prompt_buffer.clone(), codegen, + session_id, self.fs.clone(), - context_store.clone(), - workspace.clone(), thread_store.clone(), - prompt_store.as_ref().map(|s| s.downgrade()), + prompt_store.clone(), + project.clone(), + workspace.clone(), window, cx, ) @@ -119,8 +108,6 @@ impl 
TerminalInlineAssistant { terminal_view, prompt_editor, workspace.clone(), - context_store, - prompt_store, window, cx, ); @@ -140,7 +127,7 @@ impl TerminalInlineAssistant { if let Some(prompt_editor) = assist.prompt_editor.as_ref() { prompt_editor.update(cx, |this, cx| { this.editor.update(cx, |editor, cx| { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.select_all(&SelectAll, window, cx); }); }); @@ -227,6 +214,10 @@ impl TerminalInlineAssistant { assist_id: TerminalInlineAssistId, cx: &mut App, ) -> Result> { + let ConfiguredModel { model, .. } = LanguageModelRegistry::read_global(cx) + .inline_assistant_model() + .context("No inline assistant model")?; + let assist = self.assists.get(&assist_id).context("invalid assist")?; let shell = std::env::var("SHELL").ok(); @@ -243,45 +234,31 @@ impl TerminalInlineAssistant { .ok() .unwrap_or_default(); + let prompt_editor = assist.prompt_editor.clone().context("invalid assist")?; + let prompt = self.prompt_builder.generate_terminal_assistant_prompt( - &assist - .prompt_editor - .clone() - .context("invalid assist")? - .read(cx) - .prompt(cx), + &prompt_editor.read(cx).prompt(cx), shell.as_deref(), working_directory.as_deref(), &latest_output, )?; - let contexts = assist - .context_store - .read(cx) - .context() - .cloned() - .collect::>(); - let context_load_task = assist.workspace.update(cx, |workspace, cx| { - let project = workspace.project(); - load_context(contexts, project, &assist.prompt_store, cx) - })?; - - let ConfiguredModel { model, .. } = LanguageModelRegistry::read_global(cx) - .inline_assistant_model() - .context("No inline assistant model")?; - let temperature = AgentSettings::temperature_for_model(&model, cx); + let mention_set = prompt_editor.read(cx).mention_set().clone(); + let load_context_task = load_context(&mention_set, cx); + Ok(cx.background_spawn(async move { let mut request_message = LanguageModelRequestMessage { role: Role::User, content: vec![], cache: false, + reasoning_details: None, }; - context_load_task - .await - .add_to_request_message(&mut request_message); + if let Some(context) = load_context_task.await { + context.add_to_request_message(&mut request_message); + } request_message.content.push(prompt.into()); @@ -315,7 +292,7 @@ impl TerminalInlineAssistant { .terminal .update(cx, |this, cx| { this.clear_block_below_cursor(cx); - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); }) .log_err(); @@ -323,27 +300,45 @@ impl TerminalInlineAssistant { LanguageModelRegistry::read_global(cx).inline_assistant_model() { let codegen = assist.codegen.read(cx); - let executor = cx.background_executor().clone(); - report_assistant_event( - AssistantEventData { - conversation_id: None, - kind: AssistantKind::InlineTerminal, - message_id: codegen.message_id.clone(), - phase: if undo { - AssistantPhase::Rejected - } else { - AssistantPhase::Accepted - }, - model: model.telemetry_id(), - model_provider: model.provider_id().to_string(), - response_latency: None, - error_message: None, + let session_id = codegen.session_id(); + let message_id = codegen.message_id.clone(); + let model_telemetry_id = model.telemetry_id(); + let model_provider_id = model.provider_id().to_string(); + + let (phase, event_type, anthropic_event_type) = if undo { + ( + "rejected", + "Assistant Response Rejected", + language_model::AnthropicEventType::Reject, + ) + } else { + ( + "accepted", + "Assistant Response Accepted", + language_model::AnthropicEventType::Accept, + ) + 
}; + + // Fire Zed telemetry + telemetry::event!( + event_type, + kind = "inline_terminal", + phase = phase, + model = model_telemetry_id, + model_provider = model_provider_id, + message_id = message_id, + session_id = session_id, + ); + + report_anthropic_event( + &model, + language_model::AnthropicEventData { + completion_type: language_model::AnthropicCompletionType::Terminal, + event: anthropic_event_type, language_name: None, + message_id, }, - codegen.telemetry.clone(), - cx.http_client(), - model.api_key(cx), - &executor, + cx, ); } @@ -374,7 +369,7 @@ impl TerminalInlineAssistant { .terminal .update(cx, |this, cx| { this.clear_block_below_cursor(cx); - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); }) .is_ok() } @@ -409,8 +404,6 @@ struct TerminalInlineAssist { prompt_editor: Option>>, codegen: Entity, workspace: WeakEntity, - context_store: Entity, - prompt_store: Option>, _subscriptions: Vec, } @@ -420,8 +413,6 @@ impl TerminalInlineAssist { terminal: &Entity, prompt_editor: Entity>, workspace: WeakEntity, - context_store: Entity, - prompt_store: Option>, window: &mut Window, cx: &mut App, ) -> Self { @@ -431,8 +422,6 @@ impl TerminalInlineAssist { prompt_editor: Some(prompt_editor.clone()), codegen: codegen.clone(), workspace, - context_store, - prompt_store, _subscriptions: vec![ window.subscribe(&prompt_editor, cx, |prompt_editor, event, window, cx| { TerminalInlineAssistant::update_global(cx, |this, cx| { diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index e7f16b8886c719cf60763f651fe9abb9fe33d828..b26ee44ce53503f3f9b9e77b27a22c0bc39d6473 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -2,15 +2,15 @@ use crate::{ language_model_selector::{LanguageModelSelector, language_model_selector}, ui::BurnModeTooltip, }; -use agent_settings::CompletionMode; +use agent_settings::{AgentSettings, CompletionMode}; use anyhow::Result; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection, SlashCommandWorkingSet}; use assistant_slash_commands::{DefaultSlashCommand, FileSlashCommand, selections_creases}; use client::{proto, zed_urls}; use collections::{BTreeSet, HashMap, HashSet, hash_map}; use editor::{ - Anchor, Editor, EditorEvent, MenuEditPredictionsPolicy, MultiBuffer, MultiBufferSnapshot, - RowExt, ToOffset as _, ToPoint, + Anchor, Editor, EditorEvent, MenuEditPredictionsPolicy, MultiBuffer, MultiBufferOffset, + MultiBufferSnapshot, RowExt, ToOffset as _, ToPoint, actions::{MoveToEndOfLine, Newline, ShowCompletions}, display_map::{ BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata, CustomBlockId, FoldId, @@ -22,11 +22,11 @@ use editor::{FoldPlaceholder, display_map::CreaseId}; use fs::Fs; use futures::FutureExt; use gpui::{ - Action, Animation, AnimationExt, AnyElement, AnyView, App, ClipboardEntry, ClipboardItem, - Empty, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, Global, InteractiveElement, - IntoElement, ParentElement, Pixels, Render, RenderImage, SharedString, Size, - StatefulInteractiveElement, Styled, Subscription, Task, WeakEntity, actions, div, img, point, - prelude::*, pulsating_between, size, + Action, Animation, AnimationExt, AnyElement, App, ClipboardEntry, ClipboardItem, Empty, Entity, + EventEmitter, FocusHandle, Focusable, FontWeight, Global, InteractiveElement, IntoElement, + ParentElement, Pixels, Render, RenderImage, SharedString, Size, StatefulInteractiveElement, + Styled, 
Subscription, Task, WeakEntity, actions, div, img, point, prelude::*, + pulsating_between, size, }; use language::{ BufferSnapshot, LspAdapterDelegate, ToOffset, @@ -66,13 +66,15 @@ use workspace::{ }; use workspace::{ Save, Toast, Workspace, - item::{self, FollowableItem, Item, ItemHandle}, + item::{self, FollowableItem, Item}, notifications::NotificationId, pane, searchable::{SearchEvent, SearchableItem}, }; use zed_actions::agent::{AddSelectionToThread, ToggleModelSelector}; +use crate::CycleFavoriteModels; + use crate::{slash_command::SlashCommandCompletionProvider, slash_command_picker}; use assistant_text_thread::{ CacheStatus, Content, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId, @@ -280,6 +282,8 @@ impl TextThreadEditor { .thought_process_output_sections() .to_vec(); let slash_commands = text_thread.read(cx).slash_commands().clone(); + let focus_handle = editor.read(cx).focus_handle(cx); + let mut this = Self { text_thread, slash_commands, @@ -302,19 +306,34 @@ impl TextThreadEditor { language_model_selector: cx.new(|cx| { language_model_selector( |cx| LanguageModelRegistry::read_global(cx).default_model(), - move |model, cx| { - update_settings_file(fs.clone(), cx, move |settings, _| { - let provider = model.provider_id().0.to_string(); - let model = model.id().0.to_string(); - settings.agent.get_or_insert_default().set_model( - LanguageModelSelection { - provider: LanguageModelProviderSetting(provider), - model, - }, - ) - }); + { + let fs = fs.clone(); + move |model, cx| { + update_settings_file(fs.clone(), cx, move |settings, _| { + let provider = model.provider_id().0.to_string(); + let model = model.id().0.to_string(); + settings.agent.get_or_insert_default().set_model( + LanguageModelSelection { + provider: LanguageModelProviderSetting(provider), + model, + }, + ) + }); + } + }, + { + let fs = fs.clone(); + move |model, should_be_favorite, cx| { + crate::favorite_models::toggle_in_settings( + model, + should_be_favorite, + fs.clone(), + cx, + ); + } }, true, // Use popover styles for picker + focus_handle, window, cx, ) @@ -390,7 +409,7 @@ impl TextThreadEditor { let cursor = user_message .start .to_offset(self.text_thread.read(cx).buffer().read(cx)); - cursor..cursor + MultiBufferOffset(cursor)..MultiBufferOffset(cursor) }; self.editor.update(cx, |editor, cx| { editor.change_selections(Default::default(), window, cx, |selections| { @@ -431,7 +450,7 @@ impl TextThreadEditor { let cursors = self.cursors(cx); self.text_thread.update(cx, |text_thread, cx| { let messages = text_thread - .messages_for_offsets(cursors, cx) + .messages_for_offsets(cursors.into_iter().map(|cursor| cursor.0), cx) .into_iter() .map(|message| message.id) .collect(); @@ -439,9 +458,11 @@ impl TextThreadEditor { }); } - fn cursors(&self, cx: &mut App) -> Vec { + fn cursors(&self, cx: &mut App) -> Vec { let selections = self.editor.update(cx, |editor, cx| { - editor.selections.all::(&editor.display_snapshot(cx)) + editor + .selections + .all::(&editor.display_snapshot(cx)) }); selections .into_iter() @@ -1320,7 +1341,7 @@ impl TextThreadEditor { if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) { active_editor_view.update(cx, |editor, cx| { editor.insert(&text, window, cx); - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); }) } } @@ -1580,7 +1601,11 @@ impl TextThreadEditor { fn get_clipboard_contents( &mut self, cx: &mut Context, - ) -> (String, CopyMetadata, Vec>) { + ) -> ( + String, + CopyMetadata, + Vec>, 
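Several of the text_thread_editor.rs hunks now pass editor offsets around as `MultiBufferOffset` values and unwrap them with `.0` where the text-thread model still expects plain `usize` offsets (`messages_for_offsets`, `split_message`). A small sketch of that pattern with a simplified stand-in type; the real `editor::MultiBufferOffset` may carry more than a bare byte offset:

use std::ops::Range;

// Simplified stand-in for editor::MultiBufferOffset (assumption: a thin
// newtype over a byte offset, as the `.0` accesses in this diff suggest).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct MultiBufferOffset(pub usize);

// Unwrap a typed range back into raw offsets, as the diff does for
// `split_message` via `range.start.0..range.end.0`.
fn to_raw(range: Range<MultiBufferOffset>) -> Range<usize> {
    range.start.0..range.end.0
}

fn main() {
    let typed = MultiBufferOffset(4)..MultiBufferOffset(9);
    assert_eq!(to_raw(typed), 4..9);
}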
+ ) { let (mut selection, creases) = self.editor.update(cx, |editor, cx| { let mut selection = editor .selections @@ -1638,30 +1663,26 @@ impl TextThreadEditor { // If selection is empty, we want to copy the entire line if selection.range().is_empty() { - let snapshot = text_thread.buffer().read(cx).snapshot(); + let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); let point = snapshot.offset_to_point(selection.range().start); selection.start = snapshot.point_to_offset(Point::new(point.row, 0)); selection.end = snapshot .point_to_offset(cmp::min(Point::new(point.row + 1, 0), snapshot.max_point())); - for chunk in text_thread - .buffer() - .read(cx) - .text_for_range(selection.range()) - { + for chunk in snapshot.text_for_range(selection.range()) { text.push_str(chunk); } } else { for message in text_thread.messages(cx) { - if message.offset_range.start >= selection.range().end { + if message.offset_range.start >= selection.range().end.0 { break; - } else if message.offset_range.end >= selection.range().start { - let range = cmp::max(message.offset_range.start, selection.range().start) - ..cmp::min(message.offset_range.end, selection.range().end); + } else if message.offset_range.end >= selection.range().start.0 { + let range = cmp::max(message.offset_range.start, selection.range().start.0) + ..cmp::min(message.offset_range.end, selection.range().end.0); if !range.is_empty() { for chunk in text_thread.buffer().read(cx).text_for_range(range) { text.push_str(chunk); } - if message.offset_range.end < selection.range().end { + if message.offset_range.end < selection.range().end.0 { text.push('\n'); } } @@ -1677,9 +1698,101 @@ impl TextThreadEditor { window: &mut Window, cx: &mut Context, ) { + let editor_clipboard_selections = cx + .read_from_clipboard() + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match entry { + ClipboardEntry::String(text) => { + text.metadata_json::>() + } + _ => None, + }); + + let has_file_context = editor_clipboard_selections + .as_ref() + .is_some_and(|selections| { + selections + .iter() + .any(|sel| sel.file_path.is_some() && sel.line_range.is_some()) + }); + + if has_file_context { + if let Some(clipboard_item) = cx.read_from_clipboard() { + if let Some(ClipboardEntry::String(clipboard_text)) = + clipboard_item.entries().first() + { + if let Some(selections) = editor_clipboard_selections { + cx.stop_propagation(); + + let text = clipboard_text.text(); + self.editor.update(cx, |editor, cx| { + let mut current_offset = 0; + let weak_editor = cx.entity().downgrade(); + + for selection in selections { + if let (Some(file_path), Some(line_range)) = + (selection.file_path, selection.line_range) + { + let selected_text = + &text[current_offset..current_offset + selection.len]; + let fence = assistant_slash_commands::codeblock_fence_for_path( + file_path.to_str(), + Some(line_range.clone()), + ); + let formatted_text = format!("{fence}{selected_text}\n```"); + + let insert_point = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); + let start_row = MultiBufferRow(insert_point.row); + + editor.insert(&formatted_text, window, cx); + + let snapshot = editor.buffer().read(cx).snapshot(cx); + let anchor_before = snapshot.anchor_after(insert_point); + let anchor_after = editor + .selections + .newest_anchor() + .head() + .bias_left(&snapshot); + + editor.insert("\n", window, cx); + + let crease_text = acp_thread::selection_name( + Some(file_path.as_ref()), + &line_range, + ); + + let fold_placeholder = 
quote_selection_fold_placeholder( + crease_text, + weak_editor.clone(), + ); + let crease = Crease::inline( + anchor_before..anchor_after, + fold_placeholder, + render_quote_selection_output_toggle, + |_, _, _, _| Empty.into_any(), + ); + editor.insert_creases(vec![crease], cx); + editor.fold_at(start_row, window, cx); + + current_offset += selection.len; + if !selection.is_entire_line && current_offset < text.len() { + current_offset += 1; + } + } + } + }); + return; + } + } + } + } + cx.stop_propagation(); - let images = if let Some(item) = cx.read_from_clipboard() { + let mut images = if let Some(item) = cx.read_from_clipboard() { item.into_entries() .filter_map(|entry| { if let ClipboardEntry::Image(image) = entry { @@ -1693,6 +1806,40 @@ impl TextThreadEditor { Vec::new() }; + if let Some(paths) = cx.read_from_clipboard() { + for path in paths + .into_entries() + .filter_map(|entry| { + if let ClipboardEntry::ExternalPaths(paths) = entry { + Some(paths.paths().to_owned()) + } else { + None + } + }) + .flatten() + { + let Ok(content) = std::fs::read(path) else { + continue; + }; + let Ok(format) = image::guess_format(&content) else { + continue; + }; + images.push(gpui::Image::from_bytes( + match format { + image::ImageFormat::Png => gpui::ImageFormat::Png, + image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg, + image::ImageFormat::WebP => gpui::ImageFormat::Webp, + image::ImageFormat::Gif => gpui::ImageFormat::Gif, + image::ImageFormat::Bmp => gpui::ImageFormat::Bmp, + image::ImageFormat::Tiff => gpui::ImageFormat::Tiff, + image::ImageFormat::Ico => gpui::ImageFormat::Ico, + _ => continue, + }, + content, + )); + } + } + let metadata = if let Some(item) = cx.read_from_clipboard() { item.entries().first().and_then(|entry| { if let ClipboardEntry::String(text) = entry { @@ -1709,7 +1856,7 @@ impl TextThreadEditor { self.editor.update(cx, |editor, cx| { let paste_position = editor .selections - .newest::(&editor.display_snapshot(cx)) + .newest::(&editor.display_snapshot(cx)) .head(); editor.paste(action, window, cx); @@ -1757,13 +1904,16 @@ impl TextThreadEditor { editor.transact(window, cx, |editor, _window, cx| { let edits = editor .selections - .all::(&editor.display_snapshot(cx)) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|selection| (selection.start..selection.end, "\n")); editor.edit(edits, cx); let snapshot = editor.buffer().read(cx).snapshot(cx); - for selection in editor.selections.all::(&editor.display_snapshot(cx)) { + for selection in editor + .selections + .all::(&editor.display_snapshot(cx)) + { image_positions.push(snapshot.anchor_before(selection.end)); } }); @@ -1855,7 +2005,7 @@ impl TextThreadEditor { let range = selection .map(|endpoint| endpoint.to_offset(&buffer)) .range(); - text_thread.split_message(range, cx); + text_thread.split_message(range.start.0..range.end.0, cx); } }); } @@ -2061,12 +2211,53 @@ impl TextThreadEditor { }; let focus_handle = self.editor().focus_handle(cx); + let (color, icon) = if self.language_model_selector_menu_handle.is_deployed() { (Color::Accent, IconName::ChevronUp) } else { (Color::Muted, IconName::ChevronDown) }; + let tooltip = Tooltip::element({ + move |_, cx| { + let focus_handle = focus_handle.clone(); + let should_show_cycle_row = !AgentSettings::get_global(cx) + .favorite_model_ids() + .is_empty(); + + v_flex() + .gap_1() + .child( + h_flex() + .gap_2() + .justify_between() + .child(Label::new("Change Model")) + .child(KeyBinding::for_action_in( + &ToggleModelSelector, + &focus_handle, + cx, + )), + ) + 
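The paste handler above also accepts external image paths from the clipboard: it reads the bytes, sniffs them with `image::guess_format`, and converts the detected format into gpui's own enum, skipping anything unsupported. A sketch of that conversion as a standalone function, with a local stand-in for `gpui::ImageFormat` (an assumption; the variants mirror the ones handled in the hunk):

// Stand-in for gpui::ImageFormat; only the variants the paste handler maps
// are modeled, everything else is treated as unsupported.
#[derive(Debug)]
enum GpuiImageFormat { Png, Jpeg, Webp, Gif, Bmp, Tiff, Ico }

// Map a format detected by the `image` crate onto the gpui equivalent,
// returning None for formats the paste handler does not support.
fn to_gpui_format(format: image::ImageFormat) -> Option<GpuiImageFormat> {
    use image::ImageFormat as I;
    Some(match format {
        I::Png => GpuiImageFormat::Png,
        I::Jpeg => GpuiImageFormat::Jpeg,
        I::WebP => GpuiImageFormat::Webp,
        I::Gif => GpuiImageFormat::Gif,
        I::Bmp => GpuiImageFormat::Bmp,
        I::Tiff => GpuiImageFormat::Tiff,
        I::Ico => GpuiImageFormat::Ico,
        _ => return None,
    })
}

fn main() {
    assert!(matches!(to_gpui_format(image::ImageFormat::Png), Some(GpuiImageFormat::Png)));
    assert!(to_gpui_format(image::ImageFormat::Avif).is_none());
}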
.when(should_show_cycle_row, |this| { + this.child( + h_flex() + .pt_1() + .gap_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .justify_between() + .child(Label::new("Cycle Favorited Models")) + .child(KeyBinding::for_action_in( + &CycleFavoriteModels, + &focus_handle, + cx, + )), + ) + }) + .into_any() + } + }); + PickerPopoverMenu::new( self.language_model_selector.clone(), ButtonLike::new("active-model") @@ -2083,9 +2274,7 @@ impl TextThreadEditor { ) .child(Icon::new(icon).color(color).size(IconSize::XSmall)), ), - move |_window, cx| { - Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) - }, + tooltip, gpui::Corner::BottomRight, cx, ) @@ -2445,6 +2634,11 @@ impl Render for TextThreadEditor { .on_action(move |_: &ToggleModelSelector, window, cx| { language_model_selector.toggle(window, cx); }) + .on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| { + this.language_model_selector.update(cx, |selector, cx| { + selector.delegate.cycle_favorite_models(window, cx); + }); + })) .size_full() .child( div() @@ -2514,7 +2708,11 @@ impl Item for TextThreadEditor { Some(self.title(cx).to_string().into()) } - fn as_searchable(&self, handle: &Entity) -> Option> { + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { Some(Box::new(handle.clone())) } @@ -2549,11 +2747,11 @@ impl Item for TextThreadEditor { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } @@ -2576,11 +2774,13 @@ impl SearchableItem for TextThreadEditor { fn update_matches( &mut self, matches: &[Self::Match], + active_match_index: Option, window: &mut Window, cx: &mut Context, ) { - self.editor - .update(cx, |editor, cx| editor.update_matches(matches, window, cx)); + self.editor.update(cx, |editor, cx| { + editor.update_matches(matches, active_match_index, window, cx) + }); } fn query_suggestion(&mut self, window: &mut Window, cx: &mut Context) -> String { @@ -2592,12 +2792,11 @@ impl SearchableItem for TextThreadEditor { &mut self, index: usize, matches: &[Self::Match], - collapse: bool, window: &mut Window, cx: &mut Context, ) { self.editor.update(cx, |editor, cx| { - editor.activate_match(index, matches, collapse, window, cx); + editor.activate_match(index, matches, window, cx); }); } @@ -2930,7 +3129,7 @@ pub fn make_lsp_adapter_delegate( #[cfg(test)] mod tests { use super::*; - use editor::SelectionEffects; + use editor::{MultiBufferOffset, SelectionEffects}; use fs::FakeFs; use gpui::{App, TestAppContext, VisualTestContext}; use indoc::indoc; @@ -3136,15 +3335,16 @@ mod tests { text_thread: &Entity, message_ix: usize, cx: &mut TestAppContext, - ) -> Range { - text_thread.update(cx, |text_thread, cx| { + ) -> Range { + let range = text_thread.update(cx, |text_thread, cx| { text_thread .messages(cx) .nth(message_ix) .unwrap() .anchor_range .to_offset(&text_thread.buffer().read(cx).snapshot()) - }) + }); + MultiBufferOffset(range.start)..MultiBufferOffset(range.end) } fn assert_copy_paste_text_thread_editor( @@ -3184,7 +3384,6 @@ mod tests { let mut text_thread = TextThread::local( registry, None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, diff --git a/crates/agent_ui/src/ui.rs b/crates/agent_ui/src/ui.rs index 
5363949b904d74d3749c066357e0c60fef19d3b9..b484fdb6c6c480f1cffe78eea7a51f635d3906a1 100644 --- a/crates/agent_ui/src/ui.rs +++ b/crates/agent_ui/src/ui.rs @@ -2,18 +2,18 @@ mod acp_onboarding_modal; mod agent_notification; mod burn_mode_tooltip; mod claude_code_onboarding_modal; -mod context_pill; mod end_trial_upsell; +mod hold_for_default; +mod model_selector_components; mod onboarding_modal; -mod unavailable_editing_tooltip; mod usage_callout; pub use acp_onboarding_modal::*; pub use agent_notification::*; pub use burn_mode_tooltip::*; pub use claude_code_onboarding_modal::*; -pub use context_pill::*; pub use end_trial_upsell::*; +pub use hold_for_default::*; +pub use model_selector_components::*; pub use onboarding_modal::*; -pub use unavailable_editing_tooltip::*; pub use usage_callout::*; diff --git a/crates/agent_ui/src/ui/acp_onboarding_modal.rs b/crates/agent_ui/src/ui/acp_onboarding_modal.rs index 8433904fb3b540c2d78c8634b7a6755303d6e15c..e48a36bd5af3eff578e230195dc2247900977173 100644 --- a/crates/agent_ui/src/ui/acp_onboarding_modal.rs +++ b/crates/agent_ui/src/ui/acp_onboarding_modal.rs @@ -222,8 +222,8 @@ impl Render for AcpOnboardingModal { acp_onboarding_event!("Canceled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child(illustration) .child( diff --git a/crates/agent_ui/src/ui/agent_notification.rs b/crates/agent_ui/src/ui/agent_notification.rs index af2a022f147b79a0a299c17dd26c7e9a8b62aeb9..34ca0bb32a82aa23d1b954554ce2dfec436bfe1c 100644 --- a/crates/agent_ui/src/ui/agent_notification.rs +++ b/crates/agent_ui/src/ui/agent_notification.rs @@ -106,9 +106,6 @@ impl Render for AgentNotification { .font(ui_font) .border_color(cx.theme().colors().border) .rounded_xl() - .on_click(cx.listener(|_, _, _, cx| { - cx.emit(AgentNotificationEvent::Accepted); - })) .child( h_flex() .items_start() diff --git a/crates/agent_ui/src/ui/claude_code_onboarding_modal.rs b/crates/agent_ui/src/ui/claude_code_onboarding_modal.rs index 06980f18977aefe228bb7f09962e69fe2b3a5068..a8f007666d8957a7195fdf36b612b578b16f543c 100644 --- a/crates/agent_ui/src/ui/claude_code_onboarding_modal.rs +++ b/crates/agent_ui/src/ui/claude_code_onboarding_modal.rs @@ -230,8 +230,8 @@ impl Render for ClaudeCodeOnboardingModal { claude_code_onboarding_event!("Canceled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child(illustration) .child( diff --git a/crates/agent_ui/src/ui/context_pill.rs b/crates/agent_ui/src/ui/context_pill.rs deleted file mode 100644 index 89bf618a16d3fb8e7abc5afaf34ee6e8bb43ab67..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/ui/context_pill.rs +++ /dev/null @@ -1,858 +0,0 @@ -use std::{ops::Range, path::Path, rc::Rc, sync::Arc, time::Duration}; - -use file_icons::FileIcons; -use futures::FutureExt as _; -use gpui::{ - Animation, AnimationExt as _, AnyView, ClickEvent, Entity, Image, MouseButton, Task, - pulsating_between, -}; -use language_model::LanguageModelImage; -use project::Project; -use prompt_store::PromptStore; -use rope::Point; -use ui::{IconButtonShape, Tooltip, prelude::*, tooltip_container}; -use 
util::paths::PathStyle; - -use crate::context::{ - AgentContextHandle, ContextId, ContextKind, DirectoryContextHandle, FetchedUrlContext, - FileContextHandle, ImageContext, ImageStatus, RulesContextHandle, SelectionContextHandle, - SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, -}; - -#[derive(IntoElement)] -pub enum ContextPill { - Added { - context: AddedContext, - dupe_name: bool, - focused: bool, - on_click: Option>, - on_remove: Option>, - }, - Suggested { - name: SharedString, - icon_path: Option, - kind: ContextKind, - focused: bool, - on_click: Option>, - }, -} - -impl ContextPill { - pub fn added( - context: AddedContext, - dupe_name: bool, - focused: bool, - on_remove: Option>, - ) -> Self { - Self::Added { - context, - dupe_name, - on_remove, - focused, - on_click: None, - } - } - - pub fn suggested( - name: SharedString, - icon_path: Option, - kind: ContextKind, - focused: bool, - ) -> Self { - Self::Suggested { - name, - icon_path, - kind, - focused, - on_click: None, - } - } - - pub fn on_click(mut self, listener: Rc) -> Self { - match &mut self { - ContextPill::Added { on_click, .. } => { - *on_click = Some(listener); - } - ContextPill::Suggested { on_click, .. } => { - *on_click = Some(listener); - } - } - self - } - - pub fn id(&self) -> ElementId { - match self { - Self::Added { context, .. } => context.handle.element_id("context-pill".into()), - Self::Suggested { .. } => "suggested-context-pill".into(), - } - } - - pub fn icon(&self) -> Icon { - match self { - Self::Suggested { - icon_path: Some(icon_path), - .. - } => Icon::from_path(icon_path), - Self::Suggested { kind, .. } => Icon::new(kind.icon()), - Self::Added { context, .. } => context.icon(), - } - } -} - -impl RenderOnce for ContextPill { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let color = cx.theme().colors(); - - let base_pill = h_flex() - .id(self.id()) - .pl_1() - .pb(px(1.)) - .border_1() - .rounded_sm() - .gap_1() - .child(self.icon().size(IconSize::XSmall).color(Color::Muted)); - - match &self { - ContextPill::Added { - context, - dupe_name, - on_remove, - focused, - on_click, - } => { - let status_is_error = matches!(context.status, ContextStatus::Error { .. }); - let status_is_warning = matches!(context.status, ContextStatus::Warning { .. }); - - base_pill - .pr(if on_remove.is_some() { px(2.) } else { px(4.) 
}) - .map(|pill| { - if status_is_error { - pill.bg(cx.theme().status().error_background) - .border_color(cx.theme().status().error_border) - } else if status_is_warning { - pill.bg(cx.theme().status().warning_background) - .border_color(cx.theme().status().warning_border) - } else if *focused { - pill.bg(color.element_background) - .border_color(color.border_focused) - } else { - pill.bg(color.element_background) - .border_color(color.border.opacity(0.5)) - } - }) - .child( - h_flex() - .id("context-data") - .gap_1() - .child( - div().max_w_64().child( - Label::new(context.name.clone()) - .size(LabelSize::Small) - .truncate(), - ), - ) - .when_some(context.parent.as_ref(), |element, parent_name| { - if *dupe_name { - element.child( - Label::new(parent_name.clone()) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - } else { - element - } - }) - .when_some(context.tooltip.as_ref(), |element, tooltip| { - element.tooltip(Tooltip::text(tooltip.clone())) - }) - .map(|element| match &context.status { - ContextStatus::Ready => element - .when_some( - context.render_hover.as_ref(), - |element, render_hover| { - let render_hover = render_hover.clone(); - element.hoverable_tooltip(move |window, cx| { - render_hover(window, cx) - }) - }, - ) - .into_any(), - ContextStatus::Loading { message } => element - .tooltip(ui::Tooltip::text(message.clone())) - .with_animation( - "pulsating-ctx-pill", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.opacity(delta), - ) - .into_any_element(), - ContextStatus::Warning { message } - | ContextStatus::Error { message } => element - .tooltip(ui::Tooltip::text(message.clone())) - .into_any_element(), - }), - ) - .when_some(on_remove.as_ref(), |element, on_remove| { - element.child( - IconButton::new( - context.handle.element_id("remove".into()), - IconName::Close, - ) - .shape(IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .tooltip(Tooltip::text("Remove Context")) - .on_click({ - let on_remove = on_remove.clone(); - move |event, window, cx| on_remove(event, window, cx) - }), - ) - }) - .when_some(on_click.as_ref(), |element, on_click| { - let on_click = on_click.clone(); - element.cursor_pointer().on_click(move |event, window, cx| { - on_click(event, window, cx); - cx.stop_propagation(); - }) - }) - .into_any_element() - } - ContextPill::Suggested { - name, - icon_path: _, - kind: _, - focused, - on_click, - } => base_pill - .cursor_pointer() - .pr_1() - .border_dashed() - .map(|pill| { - if *focused { - pill.border_color(color.border_focused) - .bg(color.element_background.opacity(0.5)) - } else { - pill.border_color(color.border) - } - }) - .hover(|style| style.bg(color.element_hover.opacity(0.5))) - .child( - div().max_w_64().child( - Label::new(name.clone()) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), - ), - ) - .tooltip(|_window, cx| { - Tooltip::with_meta("Suggested Context", None, "Click to add it", cx) - }) - .when_some(on_click.as_ref(), |element, on_click| { - let on_click = on_click.clone(); - element.on_click(move |event, window, cx| { - on_click(event, window, cx); - cx.stop_propagation(); - }) - }) - .into_any(), - } - } -} - -pub enum ContextStatus { - Ready, - Loading { message: SharedString }, - Error { message: SharedString }, - Warning { message: SharedString }, -} - -#[derive(RegisterComponent)] -pub struct AddedContext { - pub handle: AgentContextHandle, - pub kind: ContextKind, - pub name: SharedString, - pub parent: Option, - pub 
tooltip: Option, - pub icon_path: Option, - pub status: ContextStatus, - pub render_hover: Option AnyView + 'static>>, -} - -impl AddedContext { - pub fn icon(&self) -> Icon { - match &self.status { - ContextStatus::Warning { .. } => Icon::new(IconName::Warning).color(Color::Warning), - ContextStatus::Error { .. } => Icon::new(IconName::XCircle).color(Color::Error), - _ => { - if let Some(icon_path) = &self.icon_path { - Icon::from_path(icon_path) - } else { - Icon::new(self.kind.icon()) - } - } - } - } - /// Creates an `AddedContext` by retrieving relevant details of `AgentContext`. This returns a - /// `None` if `DirectoryContext` or `RulesContext` no longer exist. - /// - /// TODO: `None` cases are unremovable from `ContextStore` and so are a very minor memory leak. - pub fn new_pending( - handle: AgentContextHandle, - prompt_store: Option<&Entity>, - project: &Project, - model: Option<&Arc>, - cx: &App, - ) -> Option { - match handle { - AgentContextHandle::File(handle) => { - Self::pending_file(handle, project.path_style(cx), cx) - } - AgentContextHandle::Directory(handle) => Self::pending_directory(handle, project, cx), - AgentContextHandle::Symbol(handle) => { - Self::pending_symbol(handle, project.path_style(cx), cx) - } - AgentContextHandle::Selection(handle) => { - Self::pending_selection(handle, project.path_style(cx), cx) - } - AgentContextHandle::FetchedUrl(handle) => Some(Self::fetched_url(handle)), - AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)), - AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)), - AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx), - AgentContextHandle::Image(handle) => { - Some(Self::image(handle, model, project.path_style(cx), cx)) - } - } - } - - fn pending_file( - handle: FileContextHandle, - path_style: PathStyle, - cx: &App, - ) -> Option { - let full_path = handle - .buffer - .read(cx) - .file()? 
- .full_path(cx) - .to_string_lossy() - .to_string(); - Some(Self::file(handle, &full_path, path_style, cx)) - } - - fn file( - handle: FileContextHandle, - full_path: &str, - path_style: PathStyle, - cx: &App, - ) -> AddedContext { - let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style); - AddedContext { - kind: ContextKind::File, - name, - parent, - tooltip: Some(SharedString::new(full_path)), - icon_path: FileIcons::get_icon(Path::new(full_path), cx), - status: ContextStatus::Ready, - render_hover: None, - handle: AgentContextHandle::File(handle), - } - } - - fn pending_directory( - handle: DirectoryContextHandle, - project: &Project, - cx: &App, - ) -> Option { - let worktree = project.worktree_for_entry(handle.entry_id, cx)?.read(cx); - let entry = worktree.entry_for_id(handle.entry_id)?; - let full_path = worktree - .full_path(&entry.path) - .to_string_lossy() - .to_string(); - Some(Self::directory(handle, &full_path, project.path_style(cx))) - } - - fn directory( - handle: DirectoryContextHandle, - full_path: &str, - path_style: PathStyle, - ) -> AddedContext { - let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style); - AddedContext { - kind: ContextKind::Directory, - name, - parent, - tooltip: Some(SharedString::new(full_path)), - icon_path: None, - status: ContextStatus::Ready, - render_hover: None, - handle: AgentContextHandle::Directory(handle), - } - } - - fn pending_symbol( - handle: SymbolContextHandle, - path_style: PathStyle, - cx: &App, - ) -> Option { - let excerpt = ContextFileExcerpt::new( - &handle.full_path(cx)?.to_string_lossy(), - handle.enclosing_line_range(cx), - path_style, - cx, - ); - Some(AddedContext { - kind: ContextKind::Symbol, - name: handle.symbol.clone(), - parent: Some(excerpt.file_name_and_range.clone()), - tooltip: None, - icon_path: None, - status: ContextStatus::Ready, - render_hover: { - let handle = handle.clone(); - Some(Rc::new(move |_, cx| { - excerpt.hover_view(handle.text(cx), cx).into() - })) - }, - handle: AgentContextHandle::Symbol(handle), - }) - } - - fn pending_selection( - handle: SelectionContextHandle, - path_style: PathStyle, - cx: &App, - ) -> Option { - let excerpt = ContextFileExcerpt::new( - &handle.full_path(cx)?.to_string_lossy(), - handle.line_range(cx), - path_style, - cx, - ); - Some(AddedContext { - kind: ContextKind::Selection, - name: excerpt.file_name_and_range.clone(), - parent: excerpt.parent_name.clone(), - tooltip: None, - icon_path: excerpt.icon_path.clone(), - status: ContextStatus::Ready, - render_hover: { - let handle = handle.clone(); - Some(Rc::new(move |_, cx| { - excerpt.hover_view(handle.text(cx), cx).into() - })) - }, - handle: AgentContextHandle::Selection(handle), - }) - } - - fn fetched_url(context: FetchedUrlContext) -> AddedContext { - AddedContext { - kind: ContextKind::FetchedUrl, - name: context.url.clone(), - parent: None, - tooltip: None, - icon_path: None, - status: ContextStatus::Ready, - render_hover: None, - handle: AgentContextHandle::FetchedUrl(context), - } - } - - fn pending_thread(handle: ThreadContextHandle, cx: &App) -> AddedContext { - AddedContext { - kind: ContextKind::Thread, - name: handle.title(cx), - parent: None, - tooltip: None, - icon_path: None, - status: if handle.thread.read(cx).is_generating_summary() { - ContextStatus::Loading { - message: "Summarizing…".into(), - } - } else { - ContextStatus::Ready - }, - render_hover: { - let thread = handle.thread.clone(); - Some(Rc::new(move |_, cx| { - let 
text = thread - .update(cx, |thread, cx| thread.summary(cx)) - .now_or_never() - .flatten() - .unwrap_or_else(|| SharedString::from(thread.read(cx).to_markdown())); - ContextPillHover::new_text(text, cx).into() - })) - }, - handle: AgentContextHandle::Thread(handle), - } - } - - fn pending_text_thread(handle: TextThreadContextHandle, cx: &App) -> AddedContext { - AddedContext { - kind: ContextKind::TextThread, - name: handle.title(cx), - parent: None, - tooltip: None, - icon_path: None, - status: ContextStatus::Ready, - render_hover: { - let text_thread = handle.text_thread.clone(); - Some(Rc::new(move |_, cx| { - let text = text_thread.read(cx).to_xml(cx); - ContextPillHover::new_text(text.into(), cx).into() - })) - }, - handle: AgentContextHandle::TextThread(handle), - } - } - - fn pending_rules( - handle: RulesContextHandle, - prompt_store: Option<&Entity>, - cx: &App, - ) -> Option { - let title = prompt_store - .as_ref()? - .read(cx) - .metadata(handle.prompt_id.into())? - .title - .unwrap_or_else(|| "Unnamed Rule".into()); - Some(AddedContext { - kind: ContextKind::Rules, - name: title, - parent: None, - tooltip: None, - icon_path: None, - status: ContextStatus::Ready, - render_hover: None, - handle: AgentContextHandle::Rules(handle), - }) - } - - fn image( - context: ImageContext, - model: Option<&Arc>, - path_style: PathStyle, - cx: &App, - ) -> AddedContext { - let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() { - let (name, parent) = - extract_file_name_and_directory_from_full_path(full_path, path_style); - let icon_path = FileIcons::get_icon(Path::new(full_path), cx); - (name, parent, icon_path) - } else { - ("Image".into(), None, None) - }; - - let status = match context.status(model) { - ImageStatus::Loading => ContextStatus::Loading { - message: "Loading…".into(), - }, - ImageStatus::Error => ContextStatus::Error { - message: "Failed to load Image".into(), - }, - ImageStatus::Warning => ContextStatus::Warning { - message: format!( - "{} doesn't support attaching Images as Context", - model.map(|m| m.name().0).unwrap_or_else(|| "Model".into()) - ) - .into(), - }, - ImageStatus::Ready => ContextStatus::Ready, - }; - - AddedContext { - kind: ContextKind::Image, - name, - parent, - tooltip: None, - icon_path, - status, - render_hover: Some(Rc::new({ - let image = context.original_image.clone(); - move |_, cx| { - let image = image.clone(); - ContextPillHover::new(cx, move |_, _| { - gpui::img(image.clone()) - .max_w_96() - .max_h_96() - .into_any_element() - }) - .into() - } - })), - handle: AgentContextHandle::Image(context), - } - } -} - -fn extract_file_name_and_directory_from_full_path( - path: &str, - path_style: PathStyle, -) -> (SharedString, Option) { - let (parent, file_name) = path_style.split(path); - let parent = parent.and_then(|parent| { - let parent = parent.trim_end_matches(path_style.separator()); - let (_, parent) = path_style.split(parent); - if parent.is_empty() { - None - } else { - Some(SharedString::new(parent)) - } - }); - (SharedString::new(file_name), parent) -} - -#[derive(Debug, Clone)] -struct ContextFileExcerpt { - pub file_name_and_range: SharedString, - pub full_path_and_range: SharedString, - pub parent_name: Option, - pub icon_path: Option, -} - -impl ContextFileExcerpt { - pub fn new(full_path: &str, line_range: Range, path_style: PathStyle, cx: &App) -> Self { - let (parent, file_name) = path_style.split(full_path); - let line_range_text = format!(" ({}-{})", line_range.start.row + 1, line_range.end.row + 1); - 
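The excerpt label assembled here is the file name plus a 1-based, inclusive line range, even though the underlying rows are 0-based. A tiny sketch of that formatting, with `std::path` standing in for the `PathStyle`-aware splitting used in this (now removed) helper:

use std::path::Path;

// Build the "file_name (start-end)" label the removed ContextFileExcerpt used,
// converting 0-based row indices to the 1-based numbers shown in the UI.
fn excerpt_label(full_path: &str, start_row: u32, end_row: u32) -> String {
    let file_name = Path::new(full_path)
        .file_name()
        .map(|name| name.to_string_lossy().into_owned())
        .unwrap_or_else(|| full_path.to_string());
    format!("{file_name} ({}-{})", start_row + 1, end_row + 1)
}

fn main() {
    assert_eq!(excerpt_label("src/editor/buffer.rs", 9, 19), "buffer.rs (10-20)");
}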
let mut full_path_and_range = full_path.to_owned(); - full_path_and_range.push_str(&line_range_text); - let mut file_name_and_range = file_name.to_owned(); - file_name_and_range.push_str(&line_range_text); - - let parent_name = parent.and_then(|parent| { - let parent = parent.trim_end_matches(path_style.separator()); - let (_, parent) = path_style.split(parent); - if parent.is_empty() { - None - } else { - Some(SharedString::new(parent)) - } - }); - - let icon_path = FileIcons::get_icon(Path::new(full_path), cx); - - ContextFileExcerpt { - file_name_and_range: file_name_and_range.into(), - full_path_and_range: full_path_and_range.into(), - parent_name, - icon_path, - } - } - - fn hover_view(&self, text: SharedString, cx: &mut App) -> Entity { - let icon_path = self.icon_path.clone(); - let full_path_and_range = self.full_path_and_range.clone(); - ContextPillHover::new(cx, move |_, cx| { - v_flex() - .child( - h_flex() - .gap_0p5() - .w_full() - .max_w_full() - .border_b_1() - .border_color(cx.theme().colors().border.opacity(0.6)) - .children( - icon_path - .clone() - .map(Icon::from_path) - .map(|icon| icon.color(Color::Muted).size(IconSize::XSmall)), - ) - .child( - // TODO: make this truncate on the left. - Label::new(full_path_and_range.clone()) - .size(LabelSize::Small) - .ml_1(), - ), - ) - .child( - div() - .id("context-pill-hover-contents") - .overflow_scroll() - .max_w_128() - .max_h_96() - .child(Label::new(text.clone()).buffer_font(cx)), - ) - .into_any_element() - }) - } -} - -struct ContextPillHover { - render_hover: Box AnyElement>, -} - -impl ContextPillHover { - fn new( - cx: &mut App, - render_hover: impl Fn(&mut Window, &mut App) -> AnyElement + 'static, - ) -> Entity { - cx.new(|_| Self { - render_hover: Box::new(render_hover), - }) - } - - fn new_text(content: SharedString, cx: &mut App) -> Entity { - Self::new(cx, move |_, _| { - div() - .id("context-pill-hover-contents") - .overflow_scroll() - .max_w_128() - .max_h_96() - .child(content.clone()) - .into_any_element() - }) - } -} - -impl Render for ContextPillHover { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - tooltip_container(cx, move |this, cx| { - this.occlude() - .on_mouse_move(|_, _, cx| cx.stop_propagation()) - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) - .child((self.render_hover)(window, cx)) - }) - } -} - -impl Component for AddedContext { - fn scope() -> ComponentScope { - ComponentScope::Agent - } - - fn sort_name() -> &'static str { - "AddedContext" - } - - fn preview(_window: &mut Window, cx: &mut App) -> Option { - let mut next_context_id = ContextId::zero(); - let image_ready = ( - "Ready", - AddedContext::image( - ImageContext { - context_id: next_context_id.post_inc(), - project_path: None, - full_path: None, - original_image: Arc::new(Image::empty()), - image_task: Task::ready(Some(LanguageModelImage::empty())).shared(), - }, - None, - PathStyle::local(), - cx, - ), - ); - - let image_loading = ( - "Loading", - AddedContext::image( - ImageContext { - context_id: next_context_id.post_inc(), - project_path: None, - full_path: None, - original_image: Arc::new(Image::empty()), - image_task: cx - .background_spawn(async move { - smol::Timer::after(Duration::from_secs(60 * 5)).await; - Some(LanguageModelImage::empty()) - }) - .shared(), - }, - None, - PathStyle::local(), - cx, - ), - ); - - let image_error = ( - "Error", - AddedContext::image( - ImageContext { - context_id: next_context_id.post_inc(), - project_path: None, - full_path: None, 
- original_image: Arc::new(Image::empty()), - image_task: Task::ready(None).shared(), - }, - None, - PathStyle::local(), - cx, - ), - ); - - Some( - v_flex() - .gap_6() - .children( - vec![image_ready, image_loading, image_error] - .into_iter() - .map(|(text, context)| { - single_example( - text, - ContextPill::added(context, false, false, None).into_any_element(), - ) - }), - ) - .into_any(), - ) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::App; - use language_model::{LanguageModel, fake_provider::FakeLanguageModel}; - use std::sync::Arc; - - #[gpui::test] - fn test_image_context_warning_for_unsupported_model(cx: &mut App) { - let model: Arc = Arc::new(FakeLanguageModel::default()); - assert!(!model.supports_images()); - - let image_context = ImageContext { - context_id: ContextId::zero(), - project_path: None, - original_image: Arc::new(Image::empty()), - image_task: Task::ready(Some(LanguageModelImage::empty())).shared(), - full_path: None, - }; - - let added_context = - AddedContext::image(image_context, Some(&model), PathStyle::local(), cx); - - assert!(matches!( - added_context.status, - ContextStatus::Warning { .. } - )); - - assert!(matches!(added_context.kind, ContextKind::Image)); - assert_eq!(added_context.name.as_ref(), "Image"); - assert!(added_context.parent.is_none()); - assert!(added_context.icon_path.is_none()); - } - - #[gpui::test] - fn test_image_context_ready_for_no_model(cx: &mut App) { - let image_context = ImageContext { - context_id: ContextId::zero(), - project_path: None, - original_image: Arc::new(Image::empty()), - image_task: Task::ready(Some(LanguageModelImage::empty())).shared(), - full_path: None, - }; - - let added_context = AddedContext::image(image_context, None, PathStyle::local(), cx); - - assert!( - matches!(added_context.status, ContextStatus::Ready), - "Expected ready status when no model provided" - ); - - assert!(matches!(added_context.kind, ContextKind::Image)); - assert_eq!(added_context.name.as_ref(), "Image"); - assert!(added_context.parent.is_none()); - assert!(added_context.icon_path.is_none()); - } -} diff --git a/crates/agent_ui/src/ui/hold_for_default.rs b/crates/agent_ui/src/ui/hold_for_default.rs new file mode 100644 index 0000000000000000000000000000000000000000..409e5d59707caa3a6bc62bbf470e33cb150183f5 --- /dev/null +++ b/crates/agent_ui/src/ui/hold_for_default.rs @@ -0,0 +1,40 @@ +use gpui::{App, IntoElement, Modifiers, RenderOnce, Window}; +use ui::{prelude::*, render_modifiers}; + +#[derive(IntoElement)] +pub struct HoldForDefault { + is_default: bool, +} + +impl HoldForDefault { + pub fn new(is_default: bool) -> Self { + Self { is_default } + } +} + +impl RenderOnce for HoldForDefault { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + h_flex() + .pt_1() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .gap_0p5() + .text_sm() + .text_color(Color::Muted.color(cx)) + .child("Hold") + .child(h_flex().flex_shrink_0().children(render_modifiers( + &Modifiers::secondary_key(), + PlatformStyle::platform(), + None, + Some(TextSize::Default.rems(cx).into()), + true, + ))) + .child(div().map(|this| { + if self.is_default { + this.child("to unset as default") + } else { + this.child("to set as default") + } + })) + } +} diff --git a/crates/agent_ui/src/ui/model_selector_components.rs b/crates/agent_ui/src/ui/model_selector_components.rs new file mode 100644 index 0000000000000000000000000000000000000000..061b4f58288798696b068a091fb392c033906627 --- /dev/null +++ 
b/crates/agent_ui/src/ui/model_selector_components.rs @@ -0,0 +1,176 @@ +use gpui::{Action, FocusHandle, prelude::*}; +use ui::{ElevationIndex, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; + +#[derive(IntoElement)] +pub struct ModelSelectorHeader { + title: SharedString, + has_border: bool, +} + +impl ModelSelectorHeader { + pub fn new(title: impl Into, has_border: bool) -> Self { + Self { + title: title.into(), + has_border, + } + } +} + +impl RenderOnce for ModelSelectorHeader { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + div() + .px_2() + .pb_1() + .when(self.has_border, |this| { + this.mt_1() + .pt_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + }) + .child( + Label::new(self.title) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + } +} + +#[derive(IntoElement)] +pub struct ModelSelectorListItem { + index: usize, + title: SharedString, + icon: Option, + is_selected: bool, + is_focused: bool, + is_favorite: bool, + on_toggle_favorite: Option>, +} + +impl ModelSelectorListItem { + pub fn new(index: usize, title: impl Into) -> Self { + Self { + index, + title: title.into(), + icon: None, + is_selected: false, + is_focused: false, + is_favorite: false, + on_toggle_favorite: None, + } + } + + pub fn icon(mut self, icon: IconName) -> Self { + self.icon = Some(icon); + self + } + + pub fn is_selected(mut self, is_selected: bool) -> Self { + self.is_selected = is_selected; + self + } + + pub fn is_focused(mut self, is_focused: bool) -> Self { + self.is_focused = is_focused; + self + } + + pub fn is_favorite(mut self, is_favorite: bool) -> Self { + self.is_favorite = is_favorite; + self + } + + pub fn on_toggle_favorite(mut self, handler: impl Fn(&App) + 'static) -> Self { + self.on_toggle_favorite = Some(Box::new(handler)); + self + } +} + +impl RenderOnce for ModelSelectorListItem { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let model_icon_color = if self.is_selected { + Color::Accent + } else { + Color::Muted + }; + + let is_favorite = self.is_favorite; + + ListItem::new(self.index) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(self.is_focused) + .child( + h_flex() + .w_full() + .gap_1p5() + .when_some(self.icon, |this, icon| { + this.child( + Icon::new(icon) + .color(model_icon_color) + .size(IconSize::Small), + ) + }) + .child(Label::new(self.title).truncate()), + ) + .end_slot(div().pr_2().when(self.is_selected, |this| { + this.child(Icon::new(IconName::Check).color(Color::Accent)) + })) + .end_hover_slot(div().pr_1p5().when_some(self.on_toggle_favorite, { + |this, handle_click| { + let (icon, color, tooltip) = if is_favorite { + (IconName::StarFilled, Color::Accent, "Unfavorite Model") + } else { + (IconName::Star, Color::Default, "Favorite Model") + }; + this.child( + IconButton::new(("toggle-favorite", self.index), icon) + .layer(ElevationIndex::ElevatedSurface) + .icon_color(color) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text(tooltip)) + .on_click(move |_, _, cx| (handle_click)(cx)), + ) + } + })) + } +} + +#[derive(IntoElement)] +pub struct ModelSelectorFooter { + action: Box, + focus_handle: FocusHandle, +} + +impl ModelSelectorFooter { + pub fn new(action: Box, focus_handle: FocusHandle) -> Self { + Self { + action, + focus_handle, + } + } +} + +impl RenderOnce for ModelSelectorFooter { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let action = self.action; + let focus_handle = self.focus_handle; + + 
h_flex() + .w_full() + .p_1p5() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child( + Button::new("configure", "Configure") + .full_width() + .style(ButtonStyle::Outlined) + .key_binding( + KeyBinding::for_action_in(action.as_ref(), &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(move |_, window, cx| { + window.dispatch_action(action.boxed_clone(), cx); + }), + ) + } +} diff --git a/crates/agent_ui/src/ui/onboarding_modal.rs b/crates/agent_ui/src/ui/onboarding_modal.rs index ad404afa784974631f914e6fece2de6b6c7d6a46..b8ec2b00657efca29fede32a5cc23b669ede66e7 100644 --- a/crates/agent_ui/src/ui/onboarding_modal.rs +++ b/crates/agent_ui/src/ui/onboarding_modal.rs @@ -83,8 +83,8 @@ impl Render for AgentOnboardingModal { agent_onboarding_event!("Canceled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child( div() diff --git a/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs b/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs deleted file mode 100644 index 2993fb89a989619ecfe3d79b06d82a2a6f71fc31..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs +++ /dev/null @@ -1,29 +0,0 @@ -use gpui::{Context, IntoElement, Render, Window}; -use ui::{prelude::*, tooltip_container}; - -pub struct UnavailableEditingTooltip { - agent_name: SharedString, -} - -impl UnavailableEditingTooltip { - pub fn new(agent_name: SharedString) -> Self { - Self { agent_name } - } -} - -impl Render for UnavailableEditingTooltip { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - tooltip_container(cx, |this, _| { - this.child(Label::new("Unavailable Editing")).child( - div().max_w_64().child( - Label::new(format!( - "Editing previous messages is not available for {} yet.", - self.agent_name - )) - .size(LabelSize::Small) - .color(Color::Muted), - ), - ) - }) - } -} diff --git a/crates/agent_ui_v2/Cargo.toml b/crates/agent_ui_v2/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..2b2cf337adf578432d594ce14f2f58e5911c45fb --- /dev/null +++ b/crates/agent_ui_v2/Cargo.toml @@ -0,0 +1,47 @@ +[package] +name = "agent_ui_v2" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/agent_ui_v2.rs" +doctest = false + +[features] +test-support = ["agent/test-support"] + + +[dependencies] +agent.workspace = true +agent_servers.workspace = true +agent_settings.workspace = true +agent_ui.workspace = true +anyhow.workspace = true +assistant_text_thread.workspace = true +chrono.workspace = true +db.workspace = true +editor.workspace = true +feature_flags.workspace = true +fs.workspace = true +fuzzy.workspace = true +gpui.workspace = true +menu.workspace = true +project.workspace = true +prompt_store.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +text.workspace = true +time.workspace = true +time_format.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[dev-dependencies] +agent = { workspace = true, features = ["test-support"] } diff --git a/crates/cloud_zeta2_prompt/LICENSE-GPL b/crates/agent_ui_v2/LICENSE-GPL similarity index 100% rename from 
crates/cloud_zeta2_prompt/LICENSE-GPL rename to crates/agent_ui_v2/LICENSE-GPL diff --git a/crates/agent_ui_v2/src/agent_thread_pane.rs b/crates/agent_ui_v2/src/agent_thread_pane.rs new file mode 100644 index 0000000000000000000000000000000000000000..72886f87eca38c630ec29b9b410930f1d3936b50 --- /dev/null +++ b/crates/agent_ui_v2/src/agent_thread_pane.rs @@ -0,0 +1,287 @@ +use agent::{HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer}; +use agent_servers::AgentServer; +use agent_settings::AgentSettings; +use agent_ui::acp::AcpThreadView; +use fs::Fs; +use gpui::{ + Entity, EventEmitter, Focusable, Pixels, SharedString, Subscription, WeakEntity, prelude::*, +}; +use project::Project; +use prompt_store::PromptStore; +use serde::{Deserialize, Serialize}; +use settings::DockSide; +use settings::Settings as _; +use std::rc::Rc; +use std::sync::Arc; +use ui::{Tab, Tooltip, prelude::*}; +use workspace::{ + Workspace, + dock::{ClosePane, MinimizePane, UtilityPane, UtilityPanePosition}, + utility_pane::UtilityPaneSlot, +}; + +pub const DEFAULT_UTILITY_PANE_WIDTH: Pixels = gpui::px(400.0); + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub enum SerializedHistoryEntryId { + AcpThread(String), + TextThread(String), +} + +impl From for SerializedHistoryEntryId { + fn from(id: HistoryEntryId) -> Self { + match id { + HistoryEntryId::AcpThread(session_id) => { + SerializedHistoryEntryId::AcpThread(session_id.0.to_string()) + } + HistoryEntryId::TextThread(path) => { + SerializedHistoryEntryId::TextThread(path.to_string_lossy().to_string()) + } + } + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct SerializedAgentThreadPane { + pub expanded: bool, + pub width: Option, + pub thread_id: Option, +} + +pub enum AgentsUtilityPaneEvent { + StateChanged, +} + +impl EventEmitter for AgentThreadPane {} +impl EventEmitter for AgentThreadPane {} +impl EventEmitter for AgentThreadPane {} + +struct ActiveThreadView { + view: Entity, + thread_id: HistoryEntryId, + _notify: Subscription, +} + +pub struct AgentThreadPane { + focus_handle: gpui::FocusHandle, + expanded: bool, + width: Option, + thread_view: Option, + workspace: WeakEntity, +} + +impl AgentThreadPane { + pub fn new(workspace: WeakEntity, cx: &mut ui::Context) -> Self { + let focus_handle = cx.focus_handle(); + Self { + focus_handle, + expanded: false, + width: None, + thread_view: None, + workspace, + } + } + + pub fn thread_id(&self) -> Option { + self.thread_view.as_ref().map(|tv| tv.thread_id.clone()) + } + + pub fn serialize(&self) -> SerializedAgentThreadPane { + SerializedAgentThreadPane { + expanded: self.expanded, + width: self.width, + thread_id: self.thread_id().map(SerializedHistoryEntryId::from), + } + } + + pub fn open_thread( + &mut self, + entry: HistoryEntry, + fs: Arc, + workspace: WeakEntity, + project: Entity, + history_store: Entity, + prompt_store: Option>, + window: &mut Window, + cx: &mut Context, + ) { + let thread_id = entry.id(); + + let resume_thread = match &entry { + HistoryEntry::AcpThread(thread) => Some(thread.clone()), + HistoryEntry::TextThread(_) => None, + }; + + let agent: Rc = Rc::new(NativeAgentServer::new(fs, history_store.clone())); + + let thread_view = cx.new(|cx| { + AcpThreadView::new( + agent, + resume_thread, + None, + workspace, + project, + history_store, + prompt_store, + true, + window, + cx, + ) + }); + + let notify = cx.observe(&thread_view, |_, _, cx| { + cx.notify(); + }); + + self.thread_view = Some(ActiveThreadView { + view: thread_view, + thread_id, + _notify: notify, + 
}); + + cx.notify(); + } + + fn title(&self, cx: &App) -> SharedString { + if let Some(active_thread_view) = &self.thread_view { + let thread_view = active_thread_view.view.read(cx); + if let Some(thread) = thread_view.thread() { + let title = thread.read(cx).title(); + if !title.is_empty() { + return title; + } + } + thread_view.title(cx) + } else { + "Thread".into() + } + } + + fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let position = self.position(window, cx); + let slot = match position { + UtilityPanePosition::Left => UtilityPaneSlot::Left, + UtilityPanePosition::Right => UtilityPaneSlot::Right, + }; + + let workspace = self.workspace.clone(); + let toggle_icon = self.toggle_icon(cx); + let title = self.title(cx); + + let pane_toggle_button = |workspace: WeakEntity| { + IconButton::new("toggle_utility_pane", toggle_icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Toggle Agent Pane")) + .on_click(move |_, window, cx| { + workspace + .update(cx, |workspace, cx| { + workspace.toggle_utility_pane(slot, window, cx) + }) + .ok(); + }) + }; + + h_flex() + .id("utility-pane-header") + .w_full() + .h(Tab::container_height(cx)) + .px_1p5() + .gap(DynamicSpacing::Base06.rems(cx)) + .when(slot == UtilityPaneSlot::Right, |this| { + this.flex_row_reverse() + }) + .flex_none() + .border_b_1() + .border_color(cx.theme().colors().border) + .child(pane_toggle_button(workspace)) + .child( + h_flex() + .size_full() + .min_w_0() + .gap_1() + .map(|this| { + if slot == UtilityPaneSlot::Right { + this.flex_row_reverse().justify_start() + } else { + this.justify_between() + } + }) + .child(Label::new(title).truncate()) + .child( + IconButton::new("close_btn", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Close Agent Pane")) + .on_click(cx.listener(|this, _: &gpui::ClickEvent, _window, cx| { + cx.emit(ClosePane); + this.thread_view = None; + cx.notify() + })), + ), + ) + } +} + +impl Focusable for AgentThreadPane { + fn focus_handle(&self, cx: &ui::App) -> gpui::FocusHandle { + if let Some(thread_view) = &self.thread_view { + thread_view.view.focus_handle(cx) + } else { + self.focus_handle.clone() + } + } +} + +impl UtilityPane for AgentThreadPane { + fn position(&self, _window: &Window, cx: &App) -> UtilityPanePosition { + match AgentSettings::get_global(cx).agents_panel_dock { + DockSide::Left => UtilityPanePosition::Left, + DockSide::Right => UtilityPanePosition::Right, + } + } + + fn toggle_icon(&self, _cx: &App) -> IconName { + IconName::Thread + } + + fn expanded(&self, _cx: &App) -> bool { + self.expanded + } + + fn set_expanded(&mut self, expanded: bool, cx: &mut Context) { + self.expanded = expanded; + cx.emit(AgentsUtilityPaneEvent::StateChanged); + cx.notify(); + } + + fn width(&self, _cx: &App) -> Pixels { + self.width.unwrap_or(DEFAULT_UTILITY_PANE_WIDTH) + } + + fn set_width(&mut self, width: Option, cx: &mut Context) { + self.width = width; + cx.emit(AgentsUtilityPaneEvent::StateChanged); + cx.notify(); + } +} + +impl Render for AgentThreadPane { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let content = if let Some(thread_view) = &self.thread_view { + div().size_full().child(thread_view.view.clone()) + } else { + div() + .size_full() + .flex() + .items_center() + .justify_center() + .child(Label::new("Select a thread to view details").size(LabelSize::Default)) + }; + + div() + .size_full() + .flex() + .flex_col() + .child(self.render_header(window, cx)) + .child(content) 
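Both the `AgentThreadPane` above and the `AgentsPanel` further down derive their side of the workspace from the same `agents_panel_dock` setting. A sketch of that mapping with stand-in enums (assumptions; the real types live in `settings` and `workspace::dock`):

// Stand-ins for settings::DockSide and the utility pane position enum used in
// this diff; only the variants the mapping touches are modeled.
enum DockSide { Left, Right }
enum UtilityPanePosition { Left, Right }

// Mirrors AgentThreadPane::position: the pane simply follows the setting.
fn utility_pane_position(dock: DockSide) -> UtilityPanePosition {
    match dock {
        DockSide::Left => UtilityPanePosition::Left,
        DockSide::Right => UtilityPanePosition::Right,
    }
}

fn main() {
    assert!(matches!(utility_pane_position(DockSide::Right), UtilityPanePosition::Right));
    assert!(matches!(utility_pane_position(DockSide::Left), UtilityPanePosition::Left));
}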
+ } +} diff --git a/crates/agent_ui_v2/src/agent_ui_v2.rs b/crates/agent_ui_v2/src/agent_ui_v2.rs new file mode 100644 index 0000000000000000000000000000000000000000..92a4144e304e9afbdcdde54623a3bbf3c65b8746 --- /dev/null +++ b/crates/agent_ui_v2/src/agent_ui_v2.rs @@ -0,0 +1,4 @@ +mod agent_thread_pane; +mod thread_history; + +pub mod agents_panel; diff --git a/crates/agent_ui_v2/src/agents_panel.rs b/crates/agent_ui_v2/src/agents_panel.rs new file mode 100644 index 0000000000000000000000000000000000000000..254b8d2999dd3f9ce99c07a20273cbb1ca9cb929 --- /dev/null +++ b/crates/agent_ui_v2/src/agents_panel.rs @@ -0,0 +1,437 @@ +use agent::{HistoryEntry, HistoryEntryId, HistoryStore}; +use agent_settings::AgentSettings; +use anyhow::Result; +use assistant_text_thread::TextThreadStore; +use db::kvp::KEY_VALUE_STORE; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; +use fs::Fs; +use gpui::{ + Action, AsyncWindowContext, Entity, EventEmitter, Focusable, Pixels, Subscription, Task, + WeakEntity, actions, prelude::*, +}; +use project::Project; +use prompt_store::{PromptBuilder, PromptStore}; +use serde::{Deserialize, Serialize}; +use settings::{Settings as _, update_settings_file}; +use std::sync::Arc; +use ui::{App, Context, IconName, IntoElement, ParentElement, Render, Styled, Window}; +use util::ResultExt; +use workspace::{ + Panel, Workspace, + dock::{ClosePane, DockPosition, PanelEvent, UtilityPane}, + utility_pane::{UtilityPaneSlot, utility_slot_for_dock_position}, +}; + +use crate::agent_thread_pane::{ + AgentThreadPane, AgentsUtilityPaneEvent, SerializedAgentThreadPane, SerializedHistoryEntryId, +}; +use crate::thread_history::{AcpThreadHistory, ThreadHistoryEvent}; + +const AGENTS_PANEL_KEY: &str = "agents_panel"; + +#[derive(Serialize, Deserialize, Debug)] +struct SerializedAgentsPanel { + width: Option, + pane: Option, +} + +actions!( + agents, + [ + /// Toggle the visibility of the agents panel. + ToggleAgentsPanel + ] +); + +pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _, _| { + workspace.register_action(|workspace, _: &ToggleAgentsPanel, window, cx| { + workspace.toggle_panel_focus::(window, cx); + }); + }) + .detach(); +} + +pub struct AgentsPanel { + focus_handle: gpui::FocusHandle, + workspace: WeakEntity, + project: Entity, + agent_thread_pane: Option>, + history: Entity, + history_store: Entity, + prompt_store: Option>, + fs: Arc, + width: Option, + pending_serialization: Task>, + _subscriptions: Vec, +} + +impl AgentsPanel { + pub fn load( + workspace: WeakEntity, + cx: AsyncWindowContext, + ) -> Task, anyhow::Error>> { + cx.spawn(async move |cx| { + let serialized_panel = cx + .background_spawn(async move { + KEY_VALUE_STORE + .read_kvp(AGENTS_PANEL_KEY) + .ok() + .flatten() + .and_then(|panel| { + serde_json::from_str::(&panel).ok() + }) + }) + .await; + + let (fs, project, prompt_builder) = workspace.update(cx, |workspace, cx| { + let fs = workspace.app_state().fs.clone(); + let project = workspace.project().clone(); + let prompt_builder = PromptBuilder::load(fs.clone(), false, cx); + (fs, project, prompt_builder) + })?; + + let text_thread_store = workspace + .update(cx, |_, cx| { + TextThreadStore::new( + project.clone(), + prompt_builder.clone(), + Default::default(), + cx, + ) + })? + .await?; + + let prompt_store = workspace + .update(cx, |_, cx| PromptStore::global(cx))? 
+ .await + .log_err(); + + workspace.update_in(cx, |_, window, cx| { + cx.new(|cx| { + let mut panel = Self::new( + workspace.clone(), + fs, + project, + prompt_store, + text_thread_store, + window, + cx, + ); + if let Some(serialized_panel) = serialized_panel { + panel.width = serialized_panel.width; + if let Some(serialized_pane) = serialized_panel.pane { + panel.restore_utility_pane(serialized_pane, window, cx); + } + } + panel + }) + }) + }) + } + + fn new( + workspace: WeakEntity, + fs: Arc, + project: Entity, + prompt_store: Option>, + text_thread_store: Entity, + window: &mut Window, + cx: &mut ui::Context, + ) -> Self { + let focus_handle = cx.focus_handle(); + + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + let history = cx.new(|cx| AcpThreadHistory::new(history_store.clone(), window, cx)); + + let this = cx.weak_entity(); + let subscriptions = vec![ + cx.subscribe_in(&history, window, Self::handle_history_event), + cx.on_flags_ready(move |_, cx| { + this.update(cx, |_, cx| { + cx.notify(); + }) + .ok(); + }), + ]; + + Self { + focus_handle, + workspace, + project, + agent_thread_pane: None, + history, + history_store, + prompt_store, + fs, + width: None, + pending_serialization: Task::ready(None), + _subscriptions: subscriptions, + } + } + + fn restore_utility_pane( + &mut self, + serialized_pane: SerializedAgentThreadPane, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread_id) = &serialized_pane.thread_id else { + return; + }; + + let entry = self + .history_store + .read(cx) + .entries() + .find(|e| match (&e.id(), thread_id) { + ( + HistoryEntryId::AcpThread(session_id), + SerializedHistoryEntryId::AcpThread(id), + ) => session_id.to_string() == *id, + (HistoryEntryId::TextThread(path), SerializedHistoryEntryId::TextThread(id)) => { + path.to_string_lossy() == *id + } + _ => false, + }); + + if let Some(entry) = entry { + self.open_thread( + entry, + serialized_pane.expanded, + serialized_pane.width, + window, + cx, + ); + } + } + + fn handle_utility_pane_event( + &mut self, + _utility_pane: Entity, + event: &AgentsUtilityPaneEvent, + cx: &mut Context, + ) { + match event { + AgentsUtilityPaneEvent::StateChanged => { + self.serialize(cx); + cx.notify(); + } + } + } + + fn handle_close_pane_event( + &mut self, + _utility_pane: Entity, + _event: &ClosePane, + cx: &mut Context, + ) { + self.agent_thread_pane = None; + self.serialize(cx); + cx.notify(); + } + + fn handle_history_event( + &mut self, + _history: &Entity, + event: &ThreadHistoryEvent, + window: &mut Window, + cx: &mut Context, + ) { + match event { + ThreadHistoryEvent::Open(entry) => { + self.open_thread(entry.clone(), true, None, window, cx); + } + } + } + + fn open_thread( + &mut self, + entry: HistoryEntry, + expanded: bool, + width: Option, + window: &mut Window, + cx: &mut Context, + ) { + let entry_id = entry.id(); + + if let Some(existing_pane) = &self.agent_thread_pane { + if existing_pane.read(cx).thread_id() == Some(entry_id) { + existing_pane.update(cx, |pane, cx| { + pane.set_expanded(true, cx); + }); + return; + } + } + + let fs = self.fs.clone(); + let workspace = self.workspace.clone(); + let project = self.project.clone(); + let history_store = self.history_store.clone(); + let prompt_store = self.prompt_store.clone(); + + let agent_thread_pane = cx.new(|cx| { + let mut pane = AgentThreadPane::new(workspace.clone(), cx); + pane.open_thread( + entry, + fs, + workspace.clone(), + project, + history_store, + prompt_store, + window, + cx, + ); + if let 
Some(width) = width { + pane.set_width(Some(width), cx); + } + pane.set_expanded(expanded, cx); + pane + }); + + let state_subscription = cx.subscribe(&agent_thread_pane, Self::handle_utility_pane_event); + let close_subscription = cx.subscribe(&agent_thread_pane, Self::handle_close_pane_event); + + self._subscriptions.push(state_subscription); + self._subscriptions.push(close_subscription); + + let slot = self.utility_slot(window, cx); + let panel_id = cx.entity_id(); + + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.register_utility_pane(slot, panel_id, agent_thread_pane.clone(), cx); + }); + } + + self.agent_thread_pane = Some(agent_thread_pane); + self.serialize(cx); + cx.notify(); + } + + fn utility_slot(&self, window: &Window, cx: &App) -> UtilityPaneSlot { + let position = self.position(window, cx); + utility_slot_for_dock_position(position) + } + + fn re_register_utility_pane(&mut self, window: &mut Window, cx: &mut Context) { + if let Some(pane) = &self.agent_thread_pane { + let slot = self.utility_slot(window, cx); + let panel_id = cx.entity_id(); + let pane = pane.clone(); + + if let Some(workspace) = self.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + workspace.register_utility_pane(slot, panel_id, pane, cx); + }); + } + } + } + + fn serialize(&mut self, cx: &mut Context) { + let width = self.width; + let pane = self + .agent_thread_pane + .as_ref() + .map(|pane| pane.read(cx).serialize()); + + self.pending_serialization = cx.background_spawn(async move { + KEY_VALUE_STORE + .write_kvp( + AGENTS_PANEL_KEY.into(), + serde_json::to_string(&SerializedAgentsPanel { width, pane }).unwrap(), + ) + .await + .log_err() + }); + } +} + +impl EventEmitter for AgentsPanel {} + +impl Focusable for AgentsPanel { + fn focus_handle(&self, _cx: &ui::App) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} + +impl Panel for AgentsPanel { + fn persistent_name() -> &'static str { + "AgentsPanel" + } + + fn panel_key() -> &'static str { + AGENTS_PANEL_KEY + } + + fn position(&self, _window: &Window, cx: &App) -> DockPosition { + match AgentSettings::get_global(cx).agents_panel_dock { + settings::DockSide::Left => DockPosition::Left, + settings::DockSide::Right => DockPosition::Right, + } + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + position != DockPosition::Bottom + } + + fn set_position( + &mut self, + position: DockPosition, + window: &mut Window, + cx: &mut Context, + ) { + update_settings_file(self.fs.clone(), cx, move |settings, _| { + settings.agent.get_or_insert_default().agents_panel_dock = Some(match position { + DockPosition::Left => settings::DockSide::Left, + DockPosition::Right | DockPosition::Bottom => settings::DockSide::Right, + }); + }); + self.re_register_utility_pane(window, cx); + } + + fn size(&self, window: &Window, cx: &App) -> Pixels { + let settings = AgentSettings::get_global(cx); + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => { + self.width.unwrap_or(settings.default_width) + } + DockPosition::Bottom => self.width.unwrap_or(settings.default_height), + } + } + + fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => self.width = size, + DockPosition::Bottom => {} + } + self.serialize(cx); + cx.notify(); + } + + fn icon(&self, _window: &Window, cx: &App) -> Option { + (self.enabled(cx) && 
AgentSettings::get_global(cx).button).then_some(IconName::ZedAgentTwo) + } + + fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { + Some("Agents Panel") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleAgentsPanel) + } + + fn activation_priority(&self) -> u32 { + 4 + } + + fn enabled(&self, cx: &App) -> bool { + AgentSettings::get_global(cx).enabled(cx) && cx.has_flag::() + } +} + +impl Render for AgentsPanel { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + gpui::div().size_full().child(self.history.clone()) + } +} diff --git a/crates/agent_ui_v2/src/thread_history.rs b/crates/agent_ui_v2/src/thread_history.rs new file mode 100644 index 0000000000000000000000000000000000000000..8f6626814902a9489536439e90041437a527e151 --- /dev/null +++ b/crates/agent_ui_v2/src/thread_history.rs @@ -0,0 +1,735 @@ +use agent::{HistoryEntry, HistoryStore}; +use chrono::{Datelike as _, Local, NaiveDate, TimeDelta}; +use editor::{Editor, EditorEvent}; +use fuzzy::StringMatchCandidate; +use gpui::{ + App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Task, + UniformListScrollHandle, Window, actions, uniform_list, +}; +use std::{fmt::Display, ops::Range}; +use text::Bias; +use time::{OffsetDateTime, UtcOffset}; +use ui::{ + HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip, WithScrollbar, + prelude::*, +}; + +actions!( + agents, + [ + /// Removes all thread history. + RemoveHistory, + /// Removes the currently selected thread. + RemoveSelectedThread, + ] +); + +pub struct AcpThreadHistory { + pub(crate) history_store: Entity, + scroll_handle: UniformListScrollHandle, + selected_index: usize, + hovered_index: Option, + search_editor: Entity, + search_query: SharedString, + visible_items: Vec, + local_timezone: UtcOffset, + confirming_delete_history: bool, + _update_task: Task<()>, + _subscriptions: Vec, +} + +enum ListItemType { + BucketSeparator(TimeBucket), + Entry { + entry: HistoryEntry, + format: EntryTimeFormat, + }, + SearchResult { + entry: HistoryEntry, + positions: Vec, + }, +} + +impl ListItemType { + fn history_entry(&self) -> Option<&HistoryEntry> { + match self { + ListItemType::Entry { entry, .. } => Some(entry), + ListItemType::SearchResult { entry, .. 
} => Some(entry), + _ => None, + } + } +} + +#[allow(dead_code)] +pub enum ThreadHistoryEvent { + Open(HistoryEntry), +} + +impl EventEmitter for AcpThreadHistory {} + +impl AcpThreadHistory { + pub fn new( + history_store: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let search_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search threads...", window, cx); + editor + }); + + let search_editor_subscription = + cx.subscribe(&search_editor, |this, search_editor, event, cx| { + if let EditorEvent::BufferEdited = event { + let query = search_editor.read(cx).text(cx); + if this.search_query != query { + this.search_query = query.into(); + this.update_visible_items(false, cx); + } + } + }); + + let history_store_subscription = cx.observe(&history_store, |this, _, cx| { + this.update_visible_items(true, cx); + }); + + let scroll_handle = UniformListScrollHandle::default(); + + let mut this = Self { + history_store, + scroll_handle, + selected_index: 0, + hovered_index: None, + visible_items: Default::default(), + search_editor, + local_timezone: UtcOffset::from_whole_seconds( + chrono::Local::now().offset().local_minus_utc(), + ) + .unwrap(), + search_query: SharedString::default(), + confirming_delete_history: false, + _subscriptions: vec![search_editor_subscription, history_store_subscription], + _update_task: Task::ready(()), + }; + this.update_visible_items(false, cx); + this + } + + fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { + let entries = self + .history_store + .update(cx, |store, _| store.entries().collect()); + let new_list_items = if self.search_query.is_empty() { + self.add_list_separators(entries, cx) + } else { + self.filter_search_results(entries, cx) + }; + let selected_history_entry = if preserve_selected_item { + self.selected_history_entry().cloned() + } else { + None + }; + + self._update_task = cx.spawn(async move |this, cx| { + let new_visible_items = new_list_items.await; + this.update(cx, |this, cx| { + let new_selected_index = if let Some(history_entry) = selected_history_entry { + let history_entry_id = history_entry.id(); + new_visible_items + .iter() + .position(|visible_entry| { + visible_entry + .history_entry() + .is_some_and(|entry| entry.id() == history_entry_id) + }) + .unwrap_or(0) + } else { + 0 + }; + + this.visible_items = new_visible_items; + this.set_selected_index(new_selected_index, Bias::Right, cx); + cx.notify(); + }) + .ok(); + }); + } + + fn add_list_separators(&self, entries: Vec, cx: &App) -> Task> { + cx.background_spawn(async move { + let mut items = Vec::with_capacity(entries.len() + 1); + let mut bucket = None; + let today = Local::now().naive_local().date(); + + for entry in entries.into_iter() { + let entry_date = entry + .updated_at() + .with_timezone(&Local) + .naive_local() + .date(); + let entry_bucket = TimeBucket::from_dates(today, entry_date); + + if Some(entry_bucket) != bucket { + bucket = Some(entry_bucket); + items.push(ListItemType::BucketSeparator(entry_bucket)); + } + + items.push(ListItemType::Entry { + entry, + format: entry_bucket.into(), + }); + } + items + }) + } + + fn filter_search_results( + &self, + entries: Vec, + cx: &App, + ) -> Task> { + let query = self.search_query.clone(); + cx.background_spawn({ + let executor = cx.background_executor().clone(); + async move { + let mut candidates = Vec::with_capacity(entries.len()); + + for (idx, entry) in entries.iter().enumerate() { + 
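+                    // One fuzzy-match candidate per history entry; the candidate id is
+                    // the entry's index, so matches can be mapped back to `entries` below.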
candidates.push(StringMatchCandidate::new(idx, entry.title())); + } + + const MAX_MATCHES: usize = 100; + + let matches = fuzzy::match_strings( + &candidates, + &query, + false, + true, + MAX_MATCHES, + &Default::default(), + executor, + ) + .await; + + matches + .into_iter() + .map(|search_match| ListItemType::SearchResult { + entry: entries[search_match.candidate_id].clone(), + positions: search_match.positions, + }) + .collect() + } + }) + } + + fn search_produced_no_matches(&self) -> bool { + self.visible_items.is_empty() && !self.search_query.is_empty() + } + + fn selected_history_entry(&self) -> Option<&HistoryEntry> { + self.get_history_entry(self.selected_index) + } + + fn get_history_entry(&self, visible_items_ix: usize) -> Option<&HistoryEntry> { + self.visible_items.get(visible_items_ix)?.history_entry() + } + + fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context) { + if self.visible_items.is_empty() { + self.selected_index = 0; + return; + } + while matches!( + self.visible_items.get(index), + None | Some(ListItemType::BucketSeparator(..)) + ) { + index = match bias { + Bias::Left => { + if index == 0 { + self.visible_items.len() - 1 + } else { + index - 1 + } + } + Bias::Right => { + if index >= self.visible_items.len() - 1 { + 0 + } else { + index + 1 + } + } + }; + } + self.selected_index = index; + self.scroll_handle + .scroll_to_item(index, ScrollStrategy::Top); + cx.notify() + } + + pub fn select_previous( + &mut self, + _: &menu::SelectPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + if self.selected_index == 0 { + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); + } else { + self.set_selected_index(self.selected_index - 1, Bias::Left, cx); + } + } + + pub fn select_next( + &mut self, + _: &menu::SelectNext, + _window: &mut Window, + cx: &mut Context, + ) { + if self.selected_index == self.visible_items.len() - 1 { + self.set_selected_index(0, Bias::Right, cx); + } else { + self.set_selected_index(self.selected_index + 1, Bias::Right, cx); + } + } + + fn select_first( + &mut self, + _: &menu::SelectFirst, + _window: &mut Window, + cx: &mut Context, + ) { + self.set_selected_index(0, Bias::Right, cx); + } + + fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); + } + + fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + self.confirm_entry(self.selected_index, cx); + } + + fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(ix) else { + return; + }; + cx.emit(ThreadHistoryEvent::Open(entry.clone())); + } + + fn remove_selected_thread( + &mut self, + _: &RemoveSelectedThread, + _window: &mut Window, + cx: &mut Context, + ) { + self.remove_thread(self.selected_index, cx) + } + + fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(visible_item_ix) else { + return; + }; + + let task = match entry { + HistoryEntry::AcpThread(thread) => self + .history_store + .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)), + HistoryEntry::TextThread(text_thread) => self.history_store.update(cx, |this, cx| { + this.delete_text_thread(text_thread.path.clone(), cx) + }), + }; + task.detach_and_log_err(cx); + } + + fn remove_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.history_store.update(cx, |store, cx| { + 
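+            // Clearing the history is fire-and-forget: the deletion task is detached
+            // and any failure is only logged.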
store.delete_threads(cx).detach_and_log_err(cx) + }); + self.confirming_delete_history = false; + cx.notify(); + } + + fn prompt_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = true; + cx.notify(); + } + + fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = false; + cx.notify(); + } + + fn render_list_items( + &mut self, + range: Range, + _window: &mut Window, + cx: &mut Context, + ) -> Vec { + self.visible_items + .get(range.clone()) + .into_iter() + .flatten() + .enumerate() + .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx)) + .collect() + } + + fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context) -> AnyElement { + match item { + ListItemType::Entry { entry, format } => self + .render_history_entry(entry, *format, ix, Vec::default(), cx) + .into_any(), + ListItemType::SearchResult { entry, positions } => self.render_history_entry( + entry, + EntryTimeFormat::DateAndTime, + ix, + positions.clone(), + cx, + ), + ListItemType::BucketSeparator(bucket) => div() + .px(DynamicSpacing::Base06.rems(cx)) + .pt_2() + .pb_1() + .child( + Label::new(bucket.to_string()) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .into_any_element(), + } + } + + fn render_history_entry( + &self, + entry: &HistoryEntry, + format: EntryTimeFormat, + ix: usize, + highlight_positions: Vec, + cx: &Context, + ) -> AnyElement { + let selected = ix == self.selected_index; + let hovered = Some(ix) == self.hovered_index; + let timestamp = entry.updated_at().timestamp(); + let thread_timestamp = format.format_timestamp(timestamp, self.local_timezone); + + h_flex() + .w_full() + .pb_1() + .child( + ListItem::new(ix) + .rounded() + .toggle_state(selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child( + HighlightedLabel::new(entry.title(), highlight_positions) + .size(LabelSize::Small) + .truncate(), + ) + .child( + Label::new(thread_timestamp) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .on_hover(cx.listener(move |this, is_hovered, _window, cx| { + if *is_hovered { + this.hovered_index = Some(ix); + } else if this.hovered_index == Some(ix) { + this.hovered_index = None; + } + + cx.notify(); + })) + .end_slot::(if hovered { + Some( + IconButton::new("delete", IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |_window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, cx) + }) + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_thread(ix, cx); + cx.stop_propagation() + })), + ) + } else { + None + }) + .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))), + ) + .into_any_element() + } +} + +impl Focusable for AcpThreadHistory { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.search_editor.focus_handle(cx) + } +} + +impl Render for AcpThreadHistory { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_no_history = self.history_store.read(cx).is_empty(cx); + + v_flex() + .key_context("ThreadHistory") + .size_full() + .bg(cx.theme().colors().panel_background) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + 
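+            // RemoveSelectedThread deletes the highlighted entry directly; RemoveHistory
+            // clears everything and is dispatched by the confirm button rendered below.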
.on_action(cx.listener(Self::remove_selected_thread)) + .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| { + this.remove_history(window, cx); + })) + .child( + h_flex() + .h(Tab::container_height(cx)) + .w_full() + .py_1() + .px_2() + .gap_2() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(self.search_editor.clone()), + ) + .child({ + let view = v_flex() + .id("list-container") + .relative() + .overflow_hidden() + .flex_grow(); + + if has_no_history { + view.justify_center().items_center().child( + Label::new("You don't have any past threads yet.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + } else if self.search_produced_no_matches() { + view.justify_center() + .items_center() + .child(Label::new("No threads match your search.").size(LabelSize::Small)) + } else { + view.child( + uniform_list( + "thread-history", + self.visible_items.len(), + cx.processor(|this, range: Range, window, cx| { + this.render_list_items(range, window, cx) + }), + ) + .p_1() + .pr_4() + .track_scroll(&self.scroll_handle) + .flex_grow(), + ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) + } + }) + .when(!has_no_history, |this| { + this.child( + h_flex() + .p_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .when(!self.confirming_delete_history, |this| { + this.child( + Button::new("delete_history", "Delete All History") + .full_width() + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.prompt_delete_history(window, cx); + })), + ) + }) + .when(self.confirming_delete_history, |this| { + this.w_full() + .gap_2() + .flex_wrap() + .justify_between() + .child( + h_flex() + .flex_wrap() + .gap_1() + .child( + Label::new("Delete all threads?") + .size(LabelSize::Small), + ) + .child( + Label::new("You won't be able to recover them later.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .child( + h_flex() + .gap_1() + .child( + Button::new("cancel_delete", "Cancel") + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.cancel_delete_history(window, cx); + })), + ) + .child( + Button::new("confirm_delete", "Delete") + .style(ButtonStyle::Tinted(ui::TintColor::Error)) + .color(Color::Error) + .label_size(LabelSize::Small) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action( + Box::new(RemoveHistory), + cx, + ); + })), + ), + ) + }), + ) + }) + } +} + +#[derive(Clone, Copy)] +pub enum EntryTimeFormat { + DateAndTime, + TimeOnly, +} + +impl EntryTimeFormat { + fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String { + let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap(); + + match self { + EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp( + timestamp, + OffsetDateTime::now_utc(), + timezone, + time_format::TimestampFormat::EnhancedAbsolute, + ), + EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)), + } + } +} + +impl From for EntryTimeFormat { + fn from(bucket: TimeBucket) -> Self { + match bucket { + TimeBucket::Today => EntryTimeFormat::TimeOnly, + TimeBucket::Yesterday => EntryTimeFormat::TimeOnly, + TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime, + TimeBucket::PastWeek => EntryTimeFormat::DateAndTime, + TimeBucket::All => EntryTimeFormat::DateAndTime, + } + } +} + +#[derive(PartialEq, Eq, 
Clone, Copy, Debug)] +enum TimeBucket { + Today, + Yesterday, + ThisWeek, + PastWeek, + All, +} + +impl TimeBucket { + fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { + if date == reference { + return TimeBucket::Today; + } + + if date == reference - TimeDelta::days(1) { + return TimeBucket::Yesterday; + } + + let week = date.iso_week(); + + if reference.iso_week() == week { + return TimeBucket::ThisWeek; + } + + let last_week = (reference - TimeDelta::days(7)).iso_week(); + + if week == last_week { + return TimeBucket::PastWeek; + } + + TimeBucket::All + } +} + +impl Display for TimeBucket { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TimeBucket::Today => write!(f, "Today"), + TimeBucket::Yesterday => write!(f, "Yesterday"), + TimeBucket::ThisWeek => write!(f, "This Week"), + TimeBucket::PastWeek => write!(f, "Past Week"), + TimeBucket::All => write!(f, "All"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::NaiveDate; + + #[test] + fn test_time_bucket_from_dates() { + let today = NaiveDate::from_ymd_opt(2023, 1, 15).unwrap(); + + let date = today; + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Today); + + let date = NaiveDate::from_ymd_opt(2023, 1, 14).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Yesterday); + + let date = NaiveDate::from_ymd_opt(2023, 1, 13).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 11).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 8).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); + + let date = NaiveDate::from_ymd_opt(2023, 1, 5).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); + + // All: not in this week or last week + let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); + assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::All); + + // Test year boundary cases + let new_year = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); + + let date = NaiveDate::from_ymd_opt(2022, 12, 31).unwrap(); + assert_eq!( + TimeBucket::from_dates(new_year, date), + TimeBucket::Yesterday + ); + + let date = NaiveDate::from_ymd_opt(2022, 12, 28).unwrap(); + assert_eq!(TimeBucket::from_dates(new_year, date), TimeBucket::ThisWeek); + } +} diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index cd2077cdeb1370a9753df83f9b239ef776bab149..f0dde3eedea657ea2d2ebe9ede457e329bd8b9a5 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -12,6 +12,8 @@ pub use settings::{AnthropicAvailableModel as AvailableModel, ModelMode}; use strum::{EnumIter, EnumString}; use thiserror::Error; +pub mod batches; + pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com"; #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] @@ -67,6 +69,13 @@ pub enum Model { alias = "claude-opus-4-1-thinking-latest" )] ClaudeOpus4_1Thinking, + #[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")] + ClaudeOpus4_5, + #[serde( + rename = "claude-opus-4-5-thinking", + alias = "claude-opus-4-5-thinking-latest" + )] + ClaudeOpus4_5Thinking, #[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")] ClaudeSonnet4, #[serde( @@ -131,6 +140,14 @@ impl Model { } pub fn from_id(id: &str) -> Result { + if id.starts_with("claude-opus-4-5-thinking") { + return 
Ok(Self::ClaudeOpus4_5Thinking); + } + + if id.starts_with("claude-opus-4-5") { + return Ok(Self::ClaudeOpus4_5); + } + if id.starts_with("claude-opus-4-1-thinking") { return Ok(Self::ClaudeOpus4_1Thinking); } @@ -208,6 +225,8 @@ impl Model { Self::ClaudeOpus4_1 => "claude-opus-4-1-latest", Self::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest", Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest", + Self::ClaudeOpus4_5 => "claude-opus-4-5-latest", + Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking-latest", Self::ClaudeSonnet4 => "claude-sonnet-4-latest", Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest", Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest", @@ -230,6 +249,7 @@ impl Model { match self { Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514", Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805", + Self::ClaudeOpus4_5 | Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-20251101", Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514", Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929", Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest", @@ -249,6 +269,8 @@ impl Model { Self::ClaudeOpus4_1 => "Claude Opus 4.1", Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking", Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking", + Self::ClaudeOpus4_5 => "Claude Opus 4.5", + Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking", Self::ClaudeSonnet4 => "Claude Sonnet 4", Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking", Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5", @@ -274,6 +296,8 @@ impl Model { | Self::ClaudeOpus4_1 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -303,6 +327,8 @@ impl Model { | Self::ClaudeOpus4_1 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -326,6 +352,8 @@ impl Model { | Self::ClaudeOpus4_1 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -348,6 +376,8 @@ impl Model { | Self::ClaudeOpus4_1 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -372,6 +402,7 @@ impl Model { match self { Self::ClaudeOpus4 | Self::ClaudeOpus4_1 + | Self::ClaudeOpus4_5 | Self::ClaudeSonnet4 | Self::ClaudeSonnet4_5 | Self::Claude3_5Sonnet @@ -383,6 +414,7 @@ impl Model { | Self::Claude3Haiku => AnthropicModelMode::Default, Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5Thinking | Self::ClaudeHaiku4_5Thinking @@ -393,19 +425,28 @@ impl Model { } } - pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"]; - - pub fn beta_headers(&self) -> String { - let mut headers = Self::DEFAULT_BETA_HEADERS - .iter() - .map(|header| header.to_string()) - .collect::>(); + pub fn beta_headers(&self) -> Option { + let mut headers = vec![]; match self { + Self::ClaudeOpus4 + | Self::ClaudeOpus4_1 + | Self::ClaudeOpus4_5 + | Self::ClaudeSonnet4 + | Self::ClaudeSonnet4_5 + | 
Self::ClaudeOpus4Thinking + | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5Thinking + | Self::ClaudeSonnet4Thinking + | Self::ClaudeSonnet4_5Thinking => { + // Fine-grained tool streaming for newer models + headers.push("fine-grained-tool-streaming-2025-05-14".to_string()); + } Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => { // Try beta token-efficient tool use (supported in Claude 3.7 Sonnet only) // https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use headers.push("token-efficient-tools-2025-02-19".to_string()); + headers.push("fine-grained-tool-streaming-2025-05-14".to_string()); } Self::Custom { extra_beta_headers, .. @@ -420,7 +461,11 @@ impl Model { _ => {} } - headers.join(",") + if headers.is_empty() { + None + } else { + Some(headers.join(",")) + } } pub fn tool_model_id(&self) -> &str { @@ -436,60 +481,112 @@ impl Model { } } -pub async fn complete( +/// Generate completion with streaming. +pub async fn stream_completion( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: Request, + beta_headers: Option, +) -> Result>, AnthropicError> { + stream_completion_with_rate_limit_info(client, api_url, api_key, request, beta_headers) + .await + .map(|output| output.0) +} + +/// Generate completion without streaming. +pub async fn non_streaming_completion( client: &dyn HttpClient, api_url: &str, api_key: &str, request: Request, - beta_headers: String, + beta_headers: Option, ) -> Result { + let (mut response, rate_limits) = + send_request(client, api_url, api_key, &request, beta_headers).await?; + + if response.status().is_success() { + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(AnthropicError::ReadResponse)?; + + serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse) + } else { + Err(handle_error_response(response, rate_limits).await) + } +} + +async fn send_request( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: impl Serialize, + beta_headers: Option, +) -> Result<(http::Response, RateLimitInfo), AnthropicError> { let uri = format!("{api_url}/v1/messages"); - let request_builder = HttpRequest::builder() + + let mut request_builder = HttpRequest::builder() .method(Method::POST) .uri(uri) .header("Anthropic-Version", "2023-06-01") - .header("Anthropic-Beta", beta_headers) .header("X-Api-Key", api_key.trim()) .header("Content-Type", "application/json"); + if let Some(beta_headers) = beta_headers { + request_builder = request_builder.header("Anthropic-Beta", beta_headers); + } + let serialized_request = serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?; let request = request_builder .body(AsyncBody::from(serialized_request)) .map_err(AnthropicError::BuildRequestBody)?; - let mut response = client + let response = client .send(request) .await .map_err(AnthropicError::HttpSend)?; - let status_code = response.status(); + + let rate_limits = RateLimitInfo::from_headers(response.headers()); + + Ok((response, rate_limits)) +} + +async fn handle_error_response( + mut response: http::Response, + rate_limits: RateLimitInfo, +) -> AnthropicError { + if response.status().as_u16() == 529 { + return AnthropicError::ServerOverloaded { + retry_after: rate_limits.retry_after, + }; + } + + if let Some(retry_after) = rate_limits.retry_after { + return AnthropicError::RateLimit { retry_after }; + } + let mut body = String::new(); - response + let read_result = response .body_mut() .read_to_string(&mut body) 
.await - .map_err(AnthropicError::ReadResponse)?; + .map_err(AnthropicError::ReadResponse); - if status_code.is_success() { - Ok(serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)?) - } else { - Err(AnthropicError::HttpResponseError { - status_code, - message: body, - }) + if let Err(err) = read_result { + return err; } -} -pub async fn stream_completion( - client: &dyn HttpClient, - api_url: &str, - api_key: &str, - request: Request, - beta_headers: String, -) -> Result>, AnthropicError> { - stream_completion_with_rate_limit_info(client, api_url, api_key, request, beta_headers) - .await - .map(|output| output.0) + match serde_json::from_str::(&body) { + Ok(Event::Error { error }) => AnthropicError::ApiError(error), + Ok(_) | Err(_) => AnthropicError::HttpResponseError { + status_code: response.status(), + message: body, + }, + } } /// An individual rate limit. @@ -583,7 +680,7 @@ pub async fn stream_completion_with_rate_limit_info( api_url: &str, api_key: &str, request: Request, - beta_headers: String, + beta_headers: Option, ) -> Result< ( BoxStream<'static, Result>, @@ -595,26 +692,10 @@ pub async fn stream_completion_with_rate_limit_info( base: request, stream: true, }; - let uri = format!("{api_url}/v1/messages"); - let request_builder = HttpRequest::builder() - .method(Method::POST) - .uri(uri) - .header("Anthropic-Version", "2023-06-01") - .header("Anthropic-Beta", beta_headers) - .header("X-Api-Key", api_key.trim()) - .header("Content-Type", "application/json"); - let serialized_request = - serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?; - let request = request_builder - .body(AsyncBody::from(serialized_request)) - .map_err(AnthropicError::BuildRequestBody)?; + let (response, rate_limits) = + send_request(client, api_url, api_key, &request, beta_headers).await?; - let mut response = client - .send(request) - .await - .map_err(AnthropicError::HttpSend)?; - let rate_limits = RateLimitInfo::from_headers(response.headers()); if response.status().is_success() { let reader = BufReader::new(response.into_body()); let stream = reader @@ -633,27 +714,8 @@ pub async fn stream_completion_with_rate_limit_info( }) .boxed(); Ok((stream, Some(rate_limits))) - } else if response.status().as_u16() == 529 { - Err(AnthropicError::ServerOverloaded { - retry_after: rate_limits.retry_after, - }) - } else if let Some(retry_after) = rate_limits.retry_after { - Err(AnthropicError::RateLimit { retry_after }) } else { - let mut body = String::new(); - response - .body_mut() - .read_to_string(&mut body) - .await - .map_err(AnthropicError::ReadResponse)?; - - match serde_json::from_str::(&body) { - Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)), - Ok(_) | Err(_) => Err(AnthropicError::HttpResponseError { - status_code: response.status(), - message: body, - }), - } + Err(handle_error_response(response, rate_limits).await) } } @@ -990,6 +1052,71 @@ pub fn parse_prompt_too_long(message: &str) -> Option { .ok() } +/// Request body for the token counting API. +/// Similar to `Request` but without `max_tokens` since it's not needed for counting. 
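+///
+/// Illustrative sketch of building and sending such a request with [`count_tokens`];
+/// the model id, the empty message list, and the `http_client`/`api_key` bindings are
+/// placeholders for this example, not values provided by this crate:
+///
+/// ```ignore
+/// let request = CountTokensRequest {
+///     model: "claude-sonnet-4-5-latest".to_string(),
+///     messages: Vec::new(),
+///     system: None,
+///     tools: Vec::new(),
+///     thinking: None,
+///     tool_choice: None,
+/// };
+/// let response = count_tokens(http_client, ANTHROPIC_API_URL, api_key, request).await?;
+/// println!("input_tokens = {}", response.input_tokens);
+/// ```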
+#[derive(Debug, Serialize)] +pub struct CountTokensRequest { + pub model: String, + pub messages: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub system: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub tools: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub thinking: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub tool_choice: Option, +} + +/// Response from the token counting API. +#[derive(Debug, Deserialize)] +pub struct CountTokensResponse { + pub input_tokens: u64, +} + +/// Count the number of tokens in a message without creating it. +pub async fn count_tokens( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: CountTokensRequest, +) -> Result { + let uri = format!("{api_url}/v1/messages/count_tokens"); + + let request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Anthropic-Version", "2023-06-01") + .header("X-Api-Key", api_key.trim()) + .header("Content-Type", "application/json"); + + let serialized_request = + serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?; + let http_request = request_builder + .body(AsyncBody::from(serialized_request)) + .map_err(AnthropicError::BuildRequestBody)?; + + let mut response = client + .send(http_request) + .await + .map_err(AnthropicError::HttpSend)?; + + let rate_limits = RateLimitInfo::from_headers(response.headers()); + + if response.status().is_success() { + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(AnthropicError::ReadResponse)?; + + serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse) + } else { + Err(handle_error_response(response, rate_limits).await) + } +} + #[test] fn test_match_window_exceeded() { let error = ApiError { diff --git a/crates/anthropic/src/batches.rs b/crates/anthropic/src/batches.rs new file mode 100644 index 0000000000000000000000000000000000000000..5fb594348d45c84e8c246c2611f7cde3aa77a18d --- /dev/null +++ b/crates/anthropic/src/batches.rs @@ -0,0 +1,190 @@ +use anyhow::Result; +use futures::AsyncReadExt; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use serde::{Deserialize, Serialize}; + +use crate::{AnthropicError, ApiError, RateLimitInfo, Request, Response}; + +#[derive(Debug, Serialize, Deserialize)] +pub struct BatchRequest { + pub custom_id: String, + pub params: Request, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CreateBatchRequest { + pub requests: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct MessageBatchRequestCounts { + pub processing: u64, + pub succeeded: u64, + pub errored: u64, + pub canceled: u64, + pub expired: u64, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct MessageBatch { + pub id: String, + #[serde(rename = "type")] + pub batch_type: String, + pub processing_status: String, + pub request_counts: MessageBatchRequestCounts, + pub ended_at: Option, + pub created_at: String, + pub expires_at: String, + pub archived_at: Option, + pub cancel_initiated_at: Option, + pub results_url: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum BatchResult { + #[serde(rename = "succeeded")] + Succeeded { message: Response }, + #[serde(rename = "errored")] + Errored { error: ApiError }, + #[serde(rename = "canceled")] + Canceled, + #[serde(rename = "expired")] + Expired, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct 
BatchIndividualResponse { + pub custom_id: String, + pub result: BatchResult, +} + +pub async fn create_batch( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: CreateBatchRequest, +) -> Result { + let uri = format!("{api_url}/v1/messages/batches"); + + let request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Anthropic-Version", "2023-06-01") + .header("X-Api-Key", api_key.trim()) + .header("Content-Type", "application/json"); + + let serialized_request = + serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?; + let http_request = request_builder + .body(AsyncBody::from(serialized_request)) + .map_err(AnthropicError::BuildRequestBody)?; + + let mut response = client + .send(http_request) + .await + .map_err(AnthropicError::HttpSend)?; + + let rate_limits = RateLimitInfo::from_headers(response.headers()); + + if response.status().is_success() { + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(AnthropicError::ReadResponse)?; + + serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse) + } else { + Err(crate::handle_error_response(response, rate_limits).await) + } +} + +pub async fn retrieve_batch( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + message_batch_id: &str, +) -> Result { + let uri = format!("{api_url}/v1/messages/batches/{message_batch_id}"); + + let request_builder = HttpRequest::builder() + .method(Method::GET) + .uri(uri) + .header("Anthropic-Version", "2023-06-01") + .header("X-Api-Key", api_key.trim()); + + let http_request = request_builder + .body(AsyncBody::default()) + .map_err(AnthropicError::BuildRequestBody)?; + + let mut response = client + .send(http_request) + .await + .map_err(AnthropicError::HttpSend)?; + + let rate_limits = RateLimitInfo::from_headers(response.headers()); + + if response.status().is_success() { + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(AnthropicError::ReadResponse)?; + + serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse) + } else { + Err(crate::handle_error_response(response, rate_limits).await) + } +} + +pub async fn retrieve_batch_results( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + message_batch_id: &str, +) -> Result, AnthropicError> { + let uri = format!("{api_url}/v1/messages/batches/{message_batch_id}/results"); + + let request_builder = HttpRequest::builder() + .method(Method::GET) + .uri(uri) + .header("Anthropic-Version", "2023-06-01") + .header("X-Api-Key", api_key.trim()); + + let http_request = request_builder + .body(AsyncBody::default()) + .map_err(AnthropicError::BuildRequestBody)?; + + let mut response = client + .send(http_request) + .await + .map_err(AnthropicError::HttpSend)?; + + let rate_limits = RateLimitInfo::from_headers(response.headers()); + + if response.status().is_success() { + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(AnthropicError::ReadResponse)?; + + let mut results = Vec::new(); + for line in body.lines() { + if line.trim().is_empty() { + continue; + } + let result: BatchIndividualResponse = + serde_json::from_str(line).map_err(AnthropicError::DeserializeResponse)?; + results.push(result); + } + + Ok(results) + } else { + Err(crate::handle_error_response(response, rate_limits).await) + } +} diff --git a/crates/askpass/src/askpass.rs b/crates/askpass/src/askpass.rs index 
81cdd355bf7173b3954a8c2731a0728d354253ba..ab4474aa62faedbfac0bc680a52648fa824c83a1 100644 --- a/crates/askpass/src/askpass.rs +++ b/crates/askpass/src/askpass.rs @@ -205,13 +205,9 @@ impl PasswordProxy { } else { ShellKind::Posix }; - let askpass_program = ASKPASS_PROGRAM - .get_or_init(|| current_exec) - .try_shell_safe(shell_kind) - .context("Failed to shell-escape Askpass program path.")? - .to_string(); + let askpass_program = ASKPASS_PROGRAM.get_or_init(|| current_exec); // Create an askpass script that communicates back to this process. - let askpass_script = generate_askpass_script(&askpass_program, &askpass_socket); + let askpass_script = generate_askpass_script(shell_kind, askpass_program, &askpass_socket)?; let _task = executor.spawn(async move { maybe!(async move { let listener = @@ -253,10 +249,15 @@ impl PasswordProxy { fs::write(&askpass_script_path, askpass_script) .await .with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?; - make_file_executable(&askpass_script_path).await?; + make_file_executable(&askpass_script_path) + .await + .with_context(|| { + format!("marking askpass script executable at {askpass_script_path:?}") + })?; + // todo(shell): There might be no powershell on the system #[cfg(target_os = "windows")] let askpass_helper = format!( - "powershell.exe -ExecutionPolicy Bypass -File {}", + "powershell.exe -ExecutionPolicy Bypass -File \"{}\"", askpass_script_path.display() ); @@ -334,23 +335,51 @@ pub fn set_askpass_program(path: std::path::PathBuf) { #[inline] #[cfg(not(target_os = "windows"))] -fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String { - format!( +fn generate_askpass_script( + shell_kind: ShellKind, + askpass_program: &std::path::Path, + askpass_socket: &std::path::Path, +) -> Result { + let askpass_program = shell_kind.prepend_command_prefix( + askpass_program + .to_str() + .context("Askpass program is on a non-utf8 path")?, + ); + let askpass_program = shell_kind + .try_quote_prefix_aware(&askpass_program) + .context("Failed to shell-escape Askpass program path")?; + let askpass_socket = askpass_socket + .try_shell_safe(shell_kind) + .context("Failed to shell-escape Askpass socket path")?; + let print_args = "printf '%s\\0' \"$@\""; + let shebang = "#!/bin/sh"; + Ok(format!( "{shebang}\n{print_args} | {askpass_program} --askpass={askpass_socket} 2> /dev/null \n", - askpass_socket = askpass_socket.display(), - print_args = "printf '%s\\0' \"$@\"", - shebang = "#!/bin/sh", - ) + )) } #[inline] #[cfg(target_os = "windows")] -fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String { - format!( +fn generate_askpass_script( + shell_kind: ShellKind, + askpass_program: &std::path::Path, + askpass_socket: &std::path::Path, +) -> Result { + let askpass_program = shell_kind.prepend_command_prefix( + askpass_program + .to_str() + .context("Askpass program is on a non-utf8 path")?, + ); + let askpass_program = shell_kind + .try_quote_prefix_aware(&askpass_program) + .context("Failed to shell-escape Askpass program path")?; + let askpass_socket = askpass_socket + .try_shell_safe(shell_kind) + .context("Failed to shell-escape Askpass socket path")?; + Ok(format!( r#" $ErrorActionPreference = 'Stop'; - ($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null + ($args -join [char]0) | {askpass_program} --askpass={askpass_socket} 2> $null "#, - askpass_socket = askpass_socket.display(), - ) + )) } diff --git 
a/crates/assistant_slash_commands/Cargo.toml b/crates/assistant_slash_commands/Cargo.toml index 85dd92501f93fb79ba1d3f70b3a06f1077356cfa..b2a70449f449f73c7d0017c5d2ba3707e271559a 100644 --- a/crates/assistant_slash_commands/Cargo.toml +++ b/crates/assistant_slash_commands/Cargo.toml @@ -22,7 +22,6 @@ feature_flags.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true -globset.workspace = true gpui.workspace = true html_to_markdown.workspace = true http_client.workspace = true diff --git a/crates/assistant_slash_commands/src/diagnostics_command.rs b/crates/assistant_slash_commands/src/diagnostics_command.rs index 3a9c33061575d385652b685dcca70ee87c6cac35..3b3e3f7b895d50b36c3981bf4ee442b09bfdf33f 100644 --- a/crates/assistant_slash_commands/src/diagnostics_command.rs +++ b/crates/assistant_slash_commands/src/diagnostics_command.rs @@ -233,18 +233,11 @@ fn collect_diagnostics( options: Options, cx: &mut App, ) -> Task>> { - let error_source = if let Some(path_matcher) = &options.path_matcher { - debug_assert_eq!(path_matcher.sources().len(), 1); - Some(path_matcher.sources().first().cloned().unwrap_or_default()) - } else { - None - }; - let path_style = project.read(cx).path_style(cx); let glob_is_exact_file_match = if let Some(path) = options .path_matcher .as_ref() - .and_then(|pm| pm.sources().first()) + .and_then(|pm| pm.sources().next()) { project .read(cx) @@ -266,6 +259,13 @@ fn collect_diagnostics( .collect(); cx.spawn(async move |cx| { + let error_source = if let Some(path_matcher) = &options.path_matcher { + debug_assert_eq!(path_matcher.sources().count(), 1); + Some(path_matcher.sources().next().unwrap_or_default()) + } else { + None + }; + let mut output = SlashCommandOutput::default(); if let Some(error_source) = error_source.as_ref() { @@ -277,7 +277,7 @@ fn collect_diagnostics( let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { if let Some(path_matcher) = &options.path_matcher - && !path_matcher.is_match(&path.as_std_path()) + && !path_matcher.is_match(&path) { continue; } diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index a17e198ed300f00f70d35149cbe0286af3a65a57..ae4e8363b40d520b9ea33e5cba5ffa68d783ab04 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -226,10 +226,10 @@ fn collect_files( let Ok(matchers) = glob_inputs .iter() .map(|glob_input| { - custom_path_matcher::PathMatcher::new(&[glob_input.to_owned()]) + util::paths::PathMatcher::new(&[glob_input.to_owned()], project.read(cx).path_style(cx)) .with_context(|| format!("invalid path {glob_input}")) }) - .collect::>>() + .collect::>>() else { return futures::stream::once(async { anyhow::bail!("invalid path"); @@ -250,6 +250,7 @@ fn collect_files( let worktree_id = snapshot.id(); let path_style = snapshot.path_style(); let mut directory_stack: Vec> = Vec::new(); + let mut folded_directory_path: Option> = None; let mut folded_directory_names: Arc = RelPath::empty().into(); let mut is_top_level_directory = true; @@ -277,6 +278,16 @@ fn collect_files( )))?; } + if let Some(folded_path) = &folded_directory_path { + if !entry.path.starts_with(folded_path) { + folded_directory_names = RelPath::empty().into(); + folded_directory_path = None; + if directory_stack.is_empty() { + is_top_level_directory = true; + } + } + } + let filename = 
entry.path.file_name().unwrap_or_default().to_string(); if entry.is_dir() { @@ -292,13 +303,17 @@ fn collect_files( folded_directory_names = folded_directory_names.join(RelPath::unix(&filename).unwrap()); } + folded_directory_path = Some(entry.path.clone()); continue; } } else { // Skip empty directories folded_directory_names = RelPath::empty().into(); + folded_directory_path = None; continue; } + + // Render the directory (either folded or normal) if folded_directory_names.is_empty() { let label = if is_top_level_directory { is_top_level_directory = false; @@ -334,6 +349,8 @@ fn collect_files( }, )))?; directory_stack.push(entry.path.clone()); + folded_directory_names = RelPath::empty().into(); + folded_directory_path = None; } events_tx.unbounded_send(Ok(SlashCommandEvent::Content( SlashCommandContent::Text { @@ -447,87 +464,6 @@ pub fn build_entry_output_section( } } -/// This contains a small fork of the util::paths::PathMatcher, that is stricter about the prefix -/// check. Only subpaths pass the prefix check, rather than any prefix. -mod custom_path_matcher { - use globset::{Glob, GlobSet, GlobSetBuilder}; - use std::fmt::Debug as _; - use util::{paths::SanitizedPath, rel_path::RelPath}; - - #[derive(Clone, Debug, Default)] - pub struct PathMatcher { - sources: Vec, - sources_with_trailing_slash: Vec, - glob: GlobSet, - } - - impl std::fmt::Display for PathMatcher { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.sources.fmt(f) - } - } - - impl PartialEq for PathMatcher { - fn eq(&self, other: &Self) -> bool { - self.sources.eq(&other.sources) - } - } - - impl Eq for PathMatcher {} - - impl PathMatcher { - pub fn new(globs: &[String]) -> Result { - let globs = globs - .iter() - .map(|glob| Glob::new(&SanitizedPath::new(glob).to_string())) - .collect::, _>>()?; - let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect(); - let sources_with_trailing_slash = globs - .iter() - .map(|glob| glob.glob().to_string() + "/") - .collect(); - let mut glob_builder = GlobSetBuilder::new(); - for single_glob in globs { - glob_builder.add(single_glob); - } - let glob = glob_builder.build()?; - Ok(PathMatcher { - glob, - sources, - sources_with_trailing_slash, - }) - } - - pub fn is_match(&self, other: &RelPath) -> bool { - self.sources - .iter() - .zip(self.sources_with_trailing_slash.iter()) - .any(|(source, with_slash)| { - let as_bytes = other.as_unix_str().as_bytes(); - let with_slash = if source.ends_with('/') { - source.as_bytes() - } else { - with_slash.as_bytes() - }; - - as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes()) - }) - || self.glob.is_match(other.as_std_path()) - || self.check_with_end_separator(other) - } - - fn check_with_end_separator(&self, path: &RelPath) -> bool { - let path_str = path.as_unix_str(); - let separator = "/"; - if path_str.ends_with(separator) { - false - } else { - self.glob.is_match(path_str.to_string() + separator) - } - } - } -} - pub fn append_buffer_to_output( buffer: &BufferSnapshot, path: Option<&str>, diff --git a/crates/assistant_text_thread/Cargo.toml b/crates/assistant_text_thread/Cargo.toml index 8dfdfa3828340217456088a246eee5b1568a7a77..5ad429758ea1785ecb4fcecb2f3ad83a71afda0d 100644 --- a/crates/assistant_text_thread/Cargo.toml +++ b/crates/assistant_text_thread/Cargo.toml @@ -29,6 +29,7 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true language_model.workspace = true log.workspace = 
true @@ -45,7 +46,7 @@ serde_json.workspace = true settings.workspace = true smallvec.workspace = true smol.workspace = true -telemetry_events.workspace = true +telemetry.workspace = true text.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/assistant_text_thread/src/assistant_text_thread_tests.rs b/crates/assistant_text_thread/src/assistant_text_thread_tests.rs index 75a414dfc4428b3c101a72454bb185b5a171d692..7232a03c212a9dfc4bfe9bcce4a78667d9210ad8 100644 --- a/crates/assistant_text_thread/src/assistant_text_thread_tests.rs +++ b/crates/assistant_text_thread/src/assistant_text_thread_tests.rs @@ -50,7 +50,6 @@ fn test_inserting_and_removing_messages(cx: &mut App) { TextThread::local( registry, None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -189,7 +188,6 @@ fn test_message_splitting(cx: &mut App) { TextThread::local( registry.clone(), None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -294,7 +292,6 @@ fn test_messages_for_offsets(cx: &mut App) { TextThread::local( registry, None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -405,7 +402,6 @@ async fn test_slash_commands(cx: &mut TestAppContext) { TextThread::local( registry.clone(), None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -677,7 +673,6 @@ async fn test_serialization(cx: &mut TestAppContext) { TextThread::local( registry.clone(), None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -724,7 +719,6 @@ async fn test_serialization(cx: &mut TestAppContext) { prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), None, - None, cx, ) }); @@ -780,7 +774,6 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), None, - None, cx, ) }); @@ -880,10 +873,9 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std let num_sections = rng.random_range(0..=3); let mut section_start = 0; for _ in 0..num_sections { - let mut section_end = rng.random_range(section_start..=output_text.len()); - while !output_text.is_char_boundary(section_end) { - section_end += 1; - } + let section_end = output_text.floor_char_boundary( + rng.random_range(section_start..=output_text.len()), + ); events.push(Ok(SlashCommandEvent::StartSection { icon: IconName::Ai, label: "section".into(), @@ -1042,7 +1034,6 @@ fn test_mark_cache_anchors(cx: &mut App) { TextThread::local( registry, None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, @@ -1369,7 +1360,6 @@ fn setup_context_editor_with_fake_model( TextThread::local( registry, None, - None, prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), cx, diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index 9ad383cdfd43eed236268349e2ff97c34a0178c0..5ec72eb0814f9ac09aba36f52d6f011af5b47249 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -5,22 +5,25 @@ use assistant_slash_command::{ SlashCommandResult, SlashCommandWorkingSet, }; use assistant_slash_commands::FileCommandMetadata; -use client::{self, ModelRequestUsage, RequestUsage, proto, telemetry::Telemetry}; +use client::{self, ModelRequestUsage, RequestUsage, proto}; use clock::ReplicaId; -use cloud_llm_client::{CompletionIntent, 
CompletionRequestStatus, UsageLimit}; +use cloud_llm_client::{CompletionIntent, UsageLimit}; use collections::{HashMap, HashSet}; use fs::{Fs, RenameOptions}; + use futures::{FutureExt, StreamExt, future::Shared}; use gpui::{ App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription, - Task, + Task, WeakEntity, }; +use itertools::Itertools as _; use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset}; use language_model::{ - LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent, - LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, + AnthropicCompletionType, AnthropicEventData, AnthropicEventType, LanguageModel, + LanguageModelCacheConfiguration, LanguageModelCompletionEvent, LanguageModelImage, + LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolUseId, MessageContent, PaymentRequiredError, Role, StopReason, - report_assistant_event, + report_anthropic_event, }; use open_ai::Model as OpenAiModel; use paths::text_threads_dir; @@ -38,7 +41,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase}; + use text::{BufferSnapshot, ToPoint}; use ui::IconName; use util::{ResultExt, TryFutureExt, post_inc}; @@ -667,7 +670,7 @@ pub struct TextThread { buffer: Entity, pub(crate) parsed_slash_commands: Vec, invoked_slash_commands: HashMap, - edits_since_last_parse: language::Subscription, + edits_since_last_parse: language::Subscription, slash_commands: Arc, pub(crate) slash_command_output_sections: Vec>, thought_process_output_sections: Vec>, @@ -684,9 +687,8 @@ pub struct TextThread { pending_cache_warming_task: Task>, path: Option>, _subscriptions: Vec, - telemetry: Option>, language_registry: Arc, - project: Option>, + project: Option>, prompt_builder: Arc, completion_mode: agent_settings::CompletionMode, } @@ -706,8 +708,7 @@ impl EventEmitter for TextThread {} impl TextThread { pub fn local( language_registry: Arc, - project: Option>, - telemetry: Option>, + project: Option>, prompt_builder: Arc, slash_commands: Arc, cx: &mut Context, @@ -720,7 +721,6 @@ impl TextThread { prompt_builder, slash_commands, project, - telemetry, cx, ) } @@ -740,8 +740,7 @@ impl TextThread { language_registry: Arc, prompt_builder: Arc, slash_commands: Arc, - project: Option>, - telemetry: Option>, + project: Option>, cx: &mut Context, ) -> Self { let buffer = cx.new(|_cx| { @@ -782,7 +781,6 @@ impl TextThread { completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, path: None, buffer, - telemetry, project, language_registry, slash_commands, @@ -795,7 +793,7 @@ impl TextThread { }); let message = MessageAnchor { id: first_message_id, - start: language::Anchor::MIN, + start: language::Anchor::min_for_buffer(this.buffer.read(cx).remote_id()), }; this.messages_metadata.insert( first_message_id, @@ -871,8 +869,7 @@ impl TextThread { language_registry: Arc, prompt_builder: Arc, slash_commands: Arc, - project: Option>, - telemetry: Option>, + project: Option>, cx: &mut Context, ) -> Self { let id = saved_context.id.clone().unwrap_or_else(TextThreadId::new); @@ -884,7 +881,6 @@ impl TextThread { prompt_builder, slash_commands, project, - telemetry, cx, ); this.path = Some(path); @@ -1145,12 +1141,10 @@ impl TextThread { cx: &App, ) -> bool { let version = &self.buffer.read(cx).version; - let observed_start = range.start == language::Anchor::MIN - || 
range.start == language::Anchor::MAX - || version.observed(range.start.timestamp); - let observed_end = range.end == language::Anchor::MIN - || range.end == language::Anchor::MAX - || version.observed(range.end.timestamp); + let observed_start = + range.start.is_min() || range.start.is_max() || version.observed(range.start.timestamp); + let observed_end = + range.end.is_min() || range.end.is_max() || version.observed(range.end.timestamp); observed_start && observed_end } @@ -1167,10 +1161,6 @@ impl TextThread { self.language_registry.clone() } - pub fn project(&self) -> Option> { - self.project.clone() - } - pub fn prompt_builder(&self) -> Arc { self.prompt_builder.clone() } @@ -1416,6 +1406,7 @@ impl TextThread { role: Role::User, content: vec!["Respond only with OK, nothing else.".into()], cache: false, + reasoning_details: None, }); req }; @@ -1851,14 +1842,17 @@ impl TextThread { } if ensure_trailing_newline - && buffer.contains_str_at(command_range_end, "\n") + && buffer + .chars_at(command_range_end) + .next() + .is_some_and(|c| c == '\n') { - let newline_offset = insert_position.saturating_sub(1); - if buffer.contains_str_at(newline_offset, "\n") + if let Some((prev_char, '\n')) = + buffer.reversed_chars_at(insert_position).next_tuple() && last_section_range.is_none_or(|last_section_range| { !last_section_range .to_offset(buffer) - .contains(&newline_offset) + .contains(&(insert_position - prev_char.len_utf8())) }) { deletions.push((command_range_end..command_range_end + 1, "")); @@ -2073,16 +2067,22 @@ impl TextThread { }); match event { - LanguageModelCompletionEvent::StatusUpdate(status_update) => { - if let CompletionRequestStatus::UsageUpdated { amount, limit } = status_update { - this.update_model_request_usage( - amount as u32, - limit, - cx, - ); - } + LanguageModelCompletionEvent::Started | + LanguageModelCompletionEvent::Queued {..} | + LanguageModelCompletionEvent::ToolUseLimitReached { .. } => {} + LanguageModelCompletionEvent::UsageUpdated { amount, limit } => { + this.update_model_request_usage( + amount as u32, + limit, + cx, + ); } LanguageModelCompletionEvent::StartMessage { .. } => {} + LanguageModelCompletionEvent::ReasoningDetails(_) => { + // ReasoningDetails are metadata (signatures, encrypted data, format info) + // used for request/response validation, not UI content. + // The displayable thinking text is already handled by the Thinking event. 
+ } LanguageModelCompletionEvent::Stop(reason) => { stop_reason = reason; } @@ -2206,24 +2206,26 @@ impl TextThread { .read(cx) .language() .map(|language| language.name()); - report_assistant_event( - AssistantEventData { - conversation_id: Some(this.id.0.clone()), - kind: AssistantKind::Panel, - phase: AssistantPhase::Response, - message_id: None, - model: model.telemetry_id(), - model_provider: model.provider_id().to_string(), - response_latency, - error_message, - language_name: language_name.map(|name| name.to_proto()), - }, - this.telemetry.clone(), - cx.http_client(), - model.api_key(cx), - cx.background_executor(), + + telemetry::event!( + "Assistant Responded", + conversation_id = this.id.0.clone(), + kind = "panel", + phase = "response", + model = model.telemetry_id(), + model_provider = model.provider_id().to_string(), + response_latency, + error_message, + language_name = language_name.as_ref().map(|name| name.to_proto()), ); + report_anthropic_event(&model, AnthropicEventData { + completion_type: AnthropicCompletionType::Panel, + event: AnthropicEventType::Response, + language_name: language_name.map(|name| name.to_proto()), + message_id: None, + }, cx); + if let Ok(stop_reason) = result { match stop_reason { StopReason::ToolUse => {} @@ -2306,6 +2308,7 @@ impl TextThread { role: message.role, content: Vec::new(), cache: message.cache.as_ref().is_some_and(|cache| cache.is_anchor), + reasoning_details: None, }; while let Some(content) = contents.peek() { @@ -2677,6 +2680,7 @@ impl TextThread { role: Role::User, content: vec![SUMMARIZE_THREAD_PROMPT.into()], cache: false, + reasoning_details: None, }); // If there is no summary, it is set with `done: false` so that "Loading Summary…" can @@ -2844,7 +2848,8 @@ impl TextThread { messages.next(); } } - let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX); + let message_end_anchor = + message_end.unwrap_or(language::Anchor::max_for_buffer(buffer.remote_id())); let message_end = message_end_anchor.to_offset(buffer); return Some(Message { @@ -2920,6 +2925,7 @@ impl TextThread { RenameOptions { overwrite: true, ignore_if_exists: true, + create_parents: false, }, ) .await?; @@ -2953,7 +2959,7 @@ impl TextThread { } fn update_model_request_usage(&self, amount: u32, limit: UsageLimit, cx: &mut App) { - let Some(project) = &self.project else { + let Some(project) = self.project.as_ref().and_then(|project| project.upgrade()) else { return; }; project.read(cx).user_store().update(cx, |user_store, cx| { diff --git a/crates/assistant_text_thread/src/text_thread_store.rs b/crates/assistant_text_thread/src/text_thread_store.rs index 19c317baf0fa728c77faebc388b5e36008aa39b3..483baa73134334162ea30d269a1f955dd8fe023a 100644 --- a/crates/assistant_text_thread/src/text_thread_store.rs +++ b/crates/assistant_text_thread/src/text_thread_store.rs @@ -4,7 +4,7 @@ use crate::{ }; use anyhow::{Context as _, Result}; use assistant_slash_command::{SlashCommandId, SlashCommandWorkingSet}; -use client::{Client, TypedEnvelope, proto, telemetry::Telemetry}; +use client::{Client, TypedEnvelope, proto}; use clock::ReplicaId; use collections::HashMap; use context_server::ContextServerId; @@ -48,10 +48,9 @@ pub struct TextThreadStore { fs: Arc, languages: Arc, slash_commands: Arc, - telemetry: Arc, _watch_updates: Task>, client: Arc, - project: Entity, + project: WeakEntity, project_is_shared: bool, client_subscription: Option, _project_subscriptions: Vec, @@ -88,7 +87,6 @@ impl TextThreadStore { ) -> Task>> { let fs = project.read(cx).fs().clone(); 
let languages = project.read(cx).languages().clone(); - let telemetry = project.read(cx).client().telemetry().clone(); cx.spawn(async move |cx| { const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100); let (mut events, _) = fs.watch(text_threads_dir(), CONTEXT_WATCH_DURATION).await; @@ -102,7 +100,6 @@ impl TextThreadStore { fs, languages, slash_commands, - telemetry, _watch_updates: cx.spawn(async move |this, cx| { async move { while events.next().await.is_some() { @@ -119,10 +116,10 @@ impl TextThreadStore { ], project_is_shared: false, client: project.read(cx).client(), - project: project.clone(), + project: project.downgrade(), prompt_builder, }; - this.handle_project_shared(project.clone(), cx); + this.handle_project_shared(cx); this.synchronize_contexts(cx); this.register_context_server_handlers(cx); this.reload(cx).detach_and_log_err(cx); @@ -143,10 +140,9 @@ impl TextThreadStore { fs: project.read(cx).fs().clone(), languages: project.read(cx).languages().clone(), slash_commands: Arc::default(), - telemetry: project.read(cx).client().telemetry().clone(), _watch_updates: Task::ready(None), client: project.read(cx).client(), - project, + project: project.downgrade(), project_is_shared: false, client_subscription: None, _project_subscriptions: Default::default(), @@ -180,8 +176,10 @@ impl TextThreadStore { ) -> Result { let context_id = TextThreadId::from_proto(envelope.payload.context_id); let operations = this.update(&mut cx, |this, cx| { + let project = this.project.upgrade().context("project not found")?; + anyhow::ensure!( - !this.project.read(cx).is_via_collab(), + !project.read(cx).is_via_collab(), "only the host contexts can be opened" ); @@ -211,8 +209,9 @@ impl TextThreadStore { mut cx: AsyncApp, ) -> Result { let (context_id, operations) = this.update(&mut cx, |this, cx| { + let project = this.project.upgrade().context("project not found")?; anyhow::ensure!( - !this.project.read(cx).is_via_collab(), + !project.read(cx).is_via_collab(), "can only create contexts as the host" ); @@ -255,8 +254,9 @@ impl TextThreadStore { mut cx: AsyncApp, ) -> Result { this.update(&mut cx, |this, cx| { + let project = this.project.upgrade().context("project not found")?; anyhow::ensure!( - !this.project.read(cx).is_via_collab(), + !project.read(cx).is_via_collab(), "only the host can synchronize contexts" ); @@ -293,8 +293,12 @@ impl TextThreadStore { })? 
} - fn handle_project_shared(&mut self, _: Entity, cx: &mut Context) { - let is_shared = self.project.read(cx).is_shared(); + fn handle_project_shared(&mut self, cx: &mut Context) { + let Some(project) = self.project.upgrade() else { + return; + }; + + let is_shared = project.read(cx).is_shared(); let was_shared = mem::replace(&mut self.project_is_shared, is_shared); if is_shared == was_shared { return; @@ -309,7 +313,7 @@ impl TextThreadStore { false } }); - let remote_id = self.project.read(cx).remote_id().unwrap(); + let remote_id = project.read(cx).remote_id().unwrap(); self.client_subscription = self .client .subscribe_to_entity(remote_id) @@ -323,13 +327,13 @@ impl TextThreadStore { fn handle_project_event( &mut self, - project: Entity, + _project: Entity, event: &project::Event, cx: &mut Context, ) { match event { project::Event::RemoteIdChanged(_) => { - self.handle_project_shared(project, cx); + self.handle_project_shared(cx); } project::Event::Reshared => { self.advertise_contexts(cx); @@ -371,7 +375,6 @@ impl TextThreadStore { TextThread::local( self.languages.clone(), Some(self.project.clone()), - Some(self.telemetry.clone()), self.prompt_builder.clone(), self.slash_commands.clone(), cx, @@ -382,7 +385,10 @@ impl TextThreadStore { } pub fn create_remote(&mut self, cx: &mut Context) -> Task>> { - let project = self.project.read(cx); + let Some(project) = self.project.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("project was dropped"))); + }; + let project = project.read(cx); let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow::anyhow!("project was not remote"))); }; @@ -391,7 +397,7 @@ impl TextThreadStore { let capability = project.capability(); let language_registry = self.languages.clone(); let project = self.project.clone(); - let telemetry = self.telemetry.clone(); + let prompt_builder = self.prompt_builder.clone(); let slash_commands = self.slash_commands.clone(); let request = self.client.request(proto::CreateContext { project_id }); @@ -408,7 +414,6 @@ impl TextThreadStore { prompt_builder, slash_commands, Some(project), - Some(telemetry), cx, ) })?; @@ -446,7 +451,6 @@ impl TextThreadStore { let fs = self.fs.clone(); let languages = self.languages.clone(); let project = self.project.clone(); - let telemetry = self.telemetry.clone(); let load = cx.background_spawn({ let path = path.clone(); async move { @@ -467,7 +471,6 @@ impl TextThreadStore { prompt_builder, slash_commands, Some(project), - Some(telemetry), cx, ) })?; @@ -541,7 +544,10 @@ impl TextThreadStore { text_thread_id: TextThreadId, cx: &mut Context, ) -> Task>> { - let project = self.project.read(cx); + let Some(project) = self.project.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("project was dropped"))); + }; + let project = project.read(cx); let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow::anyhow!("project was not remote"))); }; @@ -554,7 +560,6 @@ impl TextThreadStore { let capability = project.capability(); let language_registry = self.languages.clone(); let project = self.project.clone(); - let telemetry = self.telemetry.clone(); let request = self.client.request(proto::OpenContext { project_id, context_id: text_thread_id.to_proto(), @@ -573,7 +578,6 @@ impl TextThreadStore { prompt_builder, slash_commands, Some(project), - Some(telemetry), cx, ) })?; @@ -618,7 +622,10 @@ impl TextThreadStore { event: &TextThreadEvent, cx: &mut Context, ) { - let Some(project_id) = self.project.read(cx).remote_id() else { + let 
Some(project) = self.project.upgrade() else { + return; + }; + let Some(project_id) = project.read(cx).remote_id() else { return; }; @@ -652,12 +659,14 @@ impl TextThreadStore { } fn advertise_contexts(&self, cx: &App) { - let Some(project_id) = self.project.read(cx).remote_id() else { + let Some(project) = self.project.upgrade() else { + return; + }; + let Some(project_id) = project.read(cx).remote_id() else { return; }; - // For now, only the host can advertise their open contexts. - if self.project.read(cx).is_via_collab() { + if project.read(cx).is_via_collab() { return; } @@ -689,7 +698,10 @@ impl TextThreadStore { } fn synchronize_contexts(&mut self, cx: &mut Context) { - let Some(project_id) = self.project.read(cx).remote_id() else { + let Some(project) = self.project.upgrade() else { + return; + }; + let Some(project_id) = project.read(cx).remote_id() else { return; }; @@ -828,7 +840,10 @@ impl TextThreadStore { } fn register_context_server_handlers(&self, cx: &mut Context) { - let context_server_store = self.project.read(cx).context_server_store(); + let Some(project) = self.project.upgrade() else { + return; + }; + let context_server_store = project.read(cx).context_server_store(); cx.subscribe(&context_server_store, Self::handle_context_server_event) .detach(); diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index ae7c869493d8ca33528800f91c446e9546c952d0..6f352fbd7b74138d29a1f4f350b4d958139f11d5 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -21,6 +21,7 @@ http_client.workspace = true log.workspace = true paths.workspace = true release_channel.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index bd44eb714c08f9a5c698e92570a9edb518c5c806..0c122717d7a377e5aa5e8d23cab4de435bd67e33 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -2,16 +2,17 @@ use anyhow::{Context as _, Result}; use client::Client; use db::kvp::KEY_VALUE_STORE; use gpui::{ - App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion, - Task, Window, actions, + App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, Task, Window, + actions, }; use http_client::{HttpClient, HttpClientWithUrl}; use paths::remote_servers_dir; use release_channel::{AppCommitSha, ReleaseChannel}; +use semver::Version; use serde::{Deserialize, Serialize}; use settings::{RegisterSetting, Settings, SettingsStore}; +use smol::fs::File; use smol::{fs, io::AsyncReadExt}; -use smol::{fs::File, process::Command}; use std::mem; use std::{ env::{ @@ -23,6 +24,7 @@ use std::{ sync::Arc, time::Duration, }; +use util::command::new_smol_command; use workspace::Workspace; const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification"; @@ -43,7 +45,7 @@ actions!( #[derive(Clone, Debug, PartialEq, Eq)] pub enum VersionCheckType { Sha(AppCommitSha), - Semantic(SemanticVersion), + Semantic(Version), } #[derive(Serialize, Debug)] @@ -99,7 +101,7 @@ impl AutoUpdateStatus { pub struct AutoUpdater { status: AutoUpdateStatus, - current_version: SemanticVersion, + current_version: Version, client: Arc, pending_poll: Option>>, quit_subscription: Option, @@ -121,7 +123,7 @@ impl Drop for MacOsUnmounter<'_> { let mount_path = mem::take(&mut self.mount_path); self.background_executor .spawn(async move { - let 
unmount_output = Command::new("hdiutil") + let unmount_output = new_smol_command("hdiutil") .args(["detach", "-force"]) .arg(&mount_path) .output() @@ -255,7 +257,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> { match release_channel { ReleaseChannel::Stable | ReleaseChannel::Preview => { let auto_updater = auto_updater.read(cx); - let current_version = auto_updater.current_version; + let current_version = auto_updater.current_version.clone(); let release_channel = release_channel.dev_name(); let path = format!("/releases/{release_channel}/{current_version}"); let url = &auto_updater.client.http_client().build_url(&path); @@ -321,7 +323,7 @@ impl AutoUpdater { cx.default_global::().0.clone() } - fn new(current_version: SemanticVersion, client: Arc, cx: &mut Context) -> Self { + fn new(current_version: Version, client: Arc, cx: &mut Context) -> Self { // On windows, executable files cannot be overwritten while they are // running, so we must wait to overwrite the application until quitting // or restarting. When quitting the app, we spawn the auto update helper @@ -350,8 +352,7 @@ impl AutoUpdater { pub fn start_polling(&self, cx: &mut Context) -> Task> { cx.spawn(async move |this, cx| { - #[cfg(target_os = "windows")] - { + if cfg!(target_os = "windows") { use util::ResultExt; cleanup_windows() @@ -400,8 +401,8 @@ impl AutoUpdater { })); } - pub fn current_version(&self) -> SemanticVersion { - self.current_version + pub fn current_version(&self) -> Version { + self.current_version.clone() } pub fn status(&self) -> AutoUpdateStatus { @@ -422,7 +423,7 @@ impl AutoUpdater { // Ok(None). pub async fn download_remote_server_release( release_channel: ReleaseChannel, - version: Option, + version: Option, os: &str, arch: &str, set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static, @@ -469,7 +470,7 @@ impl AutoUpdater { pub async fn get_remote_server_release_url( channel: ReleaseChannel, - version: Option, + version: Option, os: &str, arch: &str, cx: &mut AsyncApp, @@ -491,7 +492,7 @@ impl AutoUpdater { async fn get_release_asset( this: &Entity, release_channel: ReleaseChannel, - version: Option, + version: Option, asset: &str, os: &str, arch: &str, @@ -509,7 +510,9 @@ impl AutoUpdater { (None, None, None) }; - let version = if let Some(version) = version { + let version = if let Some(mut version) = version { + version.pre = semver::Prerelease::EMPTY; + version.build = semver::BuildMetadata::EMPTY; version.to_string() } else { "latest".to_string() @@ -554,7 +557,7 @@ impl AutoUpdater { this.read_with(cx, |this, cx| { ( this.client.http_client(), - this.current_version, + this.current_version.clone(), this.status.clone(), ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable), ) @@ -627,16 +630,20 @@ impl AutoUpdater { fn check_if_fetched_version_is_newer( release_channel: ReleaseChannel, app_commit_sha: Result>, - installed_version: SemanticVersion, + installed_version: Version, fetched_version: String, status: AutoUpdateStatus, ) -> Result> { - let parsed_fetched_version = fetched_version.parse::(); + let parsed_fetched_version = fetched_version.parse::(); if let AutoUpdateStatus::Updated { version, .. 
} = status { match version { VersionCheckType::Sha(cached_version) => { - let should_download = fetched_version != cached_version.full(); + let should_download = + parsed_fetched_version.as_ref().ok().is_none_or(|version| { + version.build.as_str().rsplit('.').next() + != Some(&cached_version.full()) + }); let newer_version = should_download .then(|| VersionCheckType::Sha(AppCommitSha::new(fetched_version))); return Ok(newer_version); @@ -655,7 +662,11 @@ impl AutoUpdater { let should_download = app_commit_sha .ok() .flatten() - .map(|sha| fetched_version != sha) + .map(|sha| { + parsed_fetched_version.as_ref().ok().is_none_or(|version| { + version.build.as_str().rsplit('.').next() != Some(&sha) + }) + }) .unwrap_or(true); let newer_version = should_download .then(|| VersionCheckType::Sha(AppCommitSha::new(fetched_version))); @@ -708,9 +719,12 @@ impl AutoUpdater { } fn check_if_fetched_version_is_newer_non_nightly( - installed_version: SemanticVersion, - fetched_version: SemanticVersion, + mut installed_version: Version, + fetched_version: Version, ) -> Result> { + // For non-nightly releases, ignore build and pre-release fields as they're not provided by our endpoints right now. + installed_version.build = semver::BuildMetadata::EMPTY; + installed_version.pre = semver::Prerelease::EMPTY; let should_download = fetched_version > installed_version; let newer_version = should_download.then(|| VersionCheckType::Semantic(fetched_version)); Ok(newer_version) @@ -800,7 +814,7 @@ async fn install_release_linux( .await .context("failed to create directory into which to extract update")?; - let output = Command::new("tar") + let output = new_smol_command("tar") .arg("-xzf") .arg(&downloaded_tar_gz) .arg("-C") @@ -835,7 +849,7 @@ async fn install_release_linux( to = PathBuf::from(prefix); } - let output = Command::new("rsync") + let output = new_smol_command("rsync") .args(["-av", "--delete"]) .arg(&from) .arg(&to) @@ -867,7 +881,7 @@ async fn install_release_macos( let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into(); mounted_app_path.push("/"); - let output = Command::new("hdiutil") + let output = new_smol_command("hdiutil") .args(["attach", "-nobrowse"]) .arg(&downloaded_dmg) .arg("-mountroot") @@ -887,7 +901,7 @@ async fn install_release_macos( background_executor: cx.background_executor(), }; - let output = Command::new("rsync") + let output = new_smol_command("rsync") .args(["-av", "--delete"]) .arg(&mounted_app_path) .arg(&running_app_path) @@ -903,34 +917,22 @@ async fn install_release_macos( Ok(None) } -#[cfg(target_os = "windows")] async fn cleanup_windows() -> Result<()> { - use util::ResultExt; - let parent = std::env::current_exe()? .parent() .context("No parent dir for Zed.exe")? 
.to_owned(); // keep in sync with crates/auto_update_helper/src/updater.rs - smol::fs::remove_dir(parent.join("updates")) - .await - .context("failed to remove updates dir") - .log_err(); - smol::fs::remove_dir(parent.join("install")) - .await - .context("failed to remove install dir") - .log_err(); - smol::fs::remove_dir(parent.join("old")) - .await - .context("failed to remove old version dir") - .log_err(); + _ = smol::fs::remove_dir(parent.join("updates")).await; + _ = smol::fs::remove_dir(parent.join("install")).await; + _ = smol::fs::remove_dir(parent.join("old")).await; Ok(()) } async fn install_release_windows(downloaded_installer: PathBuf) -> Result> { - let output = Command::new(downloaded_installer) + let output = new_smol_command(downloaded_installer) .arg("/verysilent") .arg("/update=true") .arg("!desktopicon") @@ -1032,7 +1034,7 @@ mod tests { cx.update(|cx| { settings::init(cx); - let current_version = SemanticVersion::new(0, 100, 0); + let current_version = semver::Version::new(0, 100, 0); release_channel::init_test(current_version, ReleaseChannel::Stable, cx); let clock = Arc::new(FakeSystemClock::new()); @@ -1071,7 +1073,7 @@ mod tests { auto_updater.read_with(cx, |updater, _| { assert_eq!(updater.status(), AutoUpdateStatus::Idle); - assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0)); + assert_eq!(updater.current_version(), semver::Version::new(0, 100, 0)); }); release_available.store(true, atomic::Ordering::SeqCst); @@ -1090,7 +1092,7 @@ mod tests { assert_eq!( status, AutoUpdateStatus::Downloading { - version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1)) + version: VersionCheckType::Semantic(semver::Version::new(0, 100, 1)) } ); @@ -1120,7 +1122,7 @@ mod tests { assert_eq!( status, AutoUpdateStatus::Updated { - version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1)) + version: VersionCheckType::Semantic(semver::Version::new(0, 100, 1)) } ); let will_restart = cx.expect_restart(); @@ -1134,9 +1136,9 @@ mod tests { fn test_stable_does_not_update_when_fetched_version_is_not_higher() { let release_channel = ReleaseChannel::Stable; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Idle; - let fetched_version = SemanticVersion::new(1, 0, 0); + let fetched_version = semver::Version::new(1, 0, 0); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1153,9 +1155,9 @@ mod tests { fn test_stable_does_update_when_fetched_version_is_higher() { let release_channel = ReleaseChannel::Stable; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Idle; - let fetched_version = SemanticVersion::new(1, 0, 1); + let fetched_version = semver::Version::new(1, 0, 1); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1175,11 +1177,11 @@ mod tests { fn test_stable_does_not_update_when_fetched_version_is_not_higher_than_cached() { let release_channel = ReleaseChannel::Stable; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)), + version: 
VersionCheckType::Semantic(semver::Version::new(1, 0, 1)), }; - let fetched_version = SemanticVersion::new(1, 0, 1); + let fetched_version = semver::Version::new(1, 0, 1); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1196,11 +1198,11 @@ mod tests { fn test_stable_does_update_when_fetched_version_is_higher_than_cached() { let release_channel = ReleaseChannel::Stable; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Updated { - version: VersionCheckType::Semantic(SemanticVersion::new(1, 0, 1)), + version: VersionCheckType::Semantic(semver::Version::new(1, 0, 1)), }; - let fetched_version = SemanticVersion::new(1, 0, 2); + let fetched_version = semver::Version::new(1, 0, 2); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1220,9 +1222,10 @@ mod tests { fn test_nightly_does_not_update_when_fetched_sha_is_same() { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let mut installed_version = semver::Version::new(1, 0, 0); + installed_version.build = semver::BuildMetadata::new("a").unwrap(); let status = AutoUpdateStatus::Idle; - let fetched_sha = "a".to_string(); + let fetched_sha = "1.0.0+a".to_string(); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1239,7 +1242,7 @@ mod tests { fn test_nightly_does_update_when_fetched_sha_is_not_same() { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Idle; let fetched_sha = "b".to_string(); @@ -1258,14 +1261,15 @@ mod tests { } #[test] - fn test_nightly_does_not_update_when_fetched_sha_is_same_as_cached() { + fn test_nightly_does_not_update_when_fetched_version_is_same_as_cached() { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let mut installed_version = semver::Version::new(1, 0, 0); + installed_version.build = semver::BuildMetadata::new("a").unwrap(); let status = AutoUpdateStatus::Updated { version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; - let fetched_sha = "b".to_string(); + let fetched_sha = "1.0.0+b".to_string(); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1282,11 +1286,12 @@ mod tests { fn test_nightly_does_update_when_fetched_sha_is_not_same_as_cached() { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(Some("a".to_string())); - let installed_version = SemanticVersion::new(1, 0, 0); + let mut installed_version = semver::Version::new(1, 0, 0); + installed_version.build = semver::BuildMetadata::new("a").unwrap(); let status = AutoUpdateStatus::Updated { version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; - let fetched_sha = "c".to_string(); + let fetched_sha = "1.0.0+c".to_string(); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1306,7 +1311,7 @@ mod tests { fn test_nightly_does_update_when_installed_versions_sha_cannot_be_retrieved() { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(None); - let installed_version = 
SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Idle; let fetched_sha = "a".to_string(); @@ -1329,11 +1334,11 @@ mod tests { { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(None); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Updated { version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; - let fetched_sha = "b".to_string(); + let fetched_sha = "1.0.0+b".to_string(); let newer_version = AutoUpdater::check_if_fetched_version_is_newer( release_channel, @@ -1351,7 +1356,7 @@ mod tests { { let release_channel = ReleaseChannel::Nightly; let app_commit_sha = Ok(None); - let installed_version = SemanticVersion::new(1, 0, 0); + let installed_version = semver::Version::new(1, 0, 0); let status = AutoUpdateStatus::Updated { version: VersionCheckType::Sha(AppCommitSha::new("b".to_string())), }; diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index f146583d3bc69b167b61339278a475827bf28d0b..076e11fb4eef1e5c53e2bdc290be7117330c3e61 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -1,6 +1,6 @@ use std::{ - cell::LazyCell, path::Path, + sync::LazyLock, time::{Duration, Instant}, }; @@ -13,8 +13,8 @@ use windows::Win32::{ use crate::windows_impl::WM_JOB_UPDATED; pub(crate) struct Job { - pub apply: Box Result<()>>, - pub rollback: Box Result<()>>, + pub apply: Box Result<()> + Send + Sync>, + pub rollback: Box Result<()> + Send + Sync>, } impl Job { @@ -154,10 +154,8 @@ impl Job { } } -// app is single threaded #[cfg(not(test))] -#[allow(clippy::declare_interior_mutable_const)] -pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| { +pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| { fn p(value: &str) -> &Path { Path::new(value) } @@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| { ] }); -// app is single threaded #[cfg(test)] -#[allow(clippy::declare_interior_mutable_const)] -pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| { +pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| { fn p(value: &str) -> &Path { Path::new(value) } diff --git a/crates/auto_update_ui/Cargo.toml b/crates/auto_update_ui/Cargo.toml index 0e31f94f5ee268cdc3274dea747bd0b05d9c80eb..2b1421e35dcbcf6fac40cd0e97a3dc839da58d9e 100644 --- a/crates/auto_update_ui/Cargo.toml +++ b/crates/auto_update_ui/Cargo.toml @@ -20,6 +20,7 @@ gpui.workspace = true http_client.workspace = true markdown_preview.workspace = true release_channel.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index aeaa6ae93e635a6cab1487400fb58bd7be1bc6e1..6c32ee3b6c9b9c4974a287ff0e9a988472cecf3b 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -148,7 +148,9 @@ pub fn notify_if_app_was_updated(cx: &mut App) { let should_show_notification = should_show_notification.await?; if should_show_notification { cx.update(|cx| { - let version = updater.read(cx).current_version(); + let mut version = updater.read(cx).current_version(); + version.build = semver::BuildMetadata::EMPTY; + version.pre = semver::Prerelease::EMPTY; let app_name = 
ReleaseChannel::global(cx).display_name(); show_app_notification( NotificationId::unique::(), diff --git a/crates/bedrock/src/bedrock.rs b/crates/bedrock/src/bedrock.rs index ec0b4070906fdfd31195668312b3e7b425cd28ee..744dde38076a5a12c9bc957a75e2435b1b753d96 100644 --- a/crates/bedrock/src/bedrock.rs +++ b/crates/bedrock/src/bedrock.rs @@ -87,7 +87,7 @@ pub async fn stream_completion( Ok(None) => None, Err(err) => Some(( Err(BedrockError::ClientError(anyhow!( - "{:?}", + "{}", aws_sdk_bedrockruntime::error::DisplayErrorContext(err) ))), stream, diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs index 1691ffe199975983fbb40b781aac00a2703871ea..51e1b29f9ad3cf953605c5c59090785f3ab45eac 100644 --- a/crates/bedrock/src/models.rs +++ b/crates/bedrock/src/models.rs @@ -51,6 +51,13 @@ pub enum Model { alias = "claude-opus-4-1-thinking-latest" )] ClaudeOpus4_1Thinking, + #[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")] + ClaudeOpus4_5, + #[serde( + rename = "claude-opus-4-5-thinking", + alias = "claude-opus-4-5-thinking-latest" + )] + ClaudeOpus4_5Thinking, #[serde(rename = "claude-3-5-sonnet-v2", alias = "claude-3-5-sonnet-latest")] Claude3_5SonnetV2, #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")] @@ -141,7 +148,19 @@ impl Model { } pub fn from_id(id: &str) -> anyhow::Result { - if id.starts_with("claude-3-5-sonnet-v2") { + if id.starts_with("claude-opus-4-5-thinking") { + Ok(Self::ClaudeOpus4_5Thinking) + } else if id.starts_with("claude-opus-4-5") { + Ok(Self::ClaudeOpus4_5) + } else if id.starts_with("claude-opus-4-1-thinking") { + Ok(Self::ClaudeOpus4_1Thinking) + } else if id.starts_with("claude-opus-4-1") { + Ok(Self::ClaudeOpus4_1) + } else if id.starts_with("claude-opus-4-thinking") { + Ok(Self::ClaudeOpus4Thinking) + } else if id.starts_with("claude-opus-4") { + Ok(Self::ClaudeOpus4) + } else if id.starts_with("claude-3-5-sonnet-v2") { Ok(Self::Claude3_5SonnetV2) } else if id.starts_with("claude-3-opus") { Ok(Self::Claude3Opus) @@ -178,6 +197,8 @@ impl Model { Model::ClaudeOpus4_1 => "claude-opus-4-1", Model::ClaudeOpus4Thinking => "claude-opus-4-thinking", Model::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking", + Model::ClaudeOpus4_5 => "claude-opus-4-5", + Model::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking", Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2", Model::Claude3_5Sonnet => "claude-3-5-sonnet", Model::Claude3Opus => "claude-3-opus", @@ -245,6 +266,9 @@ impl Model { Model::ClaudeOpus4_1 | Model::ClaudeOpus4_1Thinking => { "anthropic.claude-opus-4-1-20250805-v1:0" } + Model::ClaudeOpus4_5 | Model::ClaudeOpus4_5Thinking => { + "anthropic.claude-opus-4-5-20251101-v1:0" + } Model::Claude3_5SonnetV2 => "anthropic.claude-3-5-sonnet-20241022-v2:0", Model::Claude3_5Sonnet => "anthropic.claude-3-5-sonnet-20240620-v1:0", Model::Claude3Opus => "anthropic.claude-3-opus-20240229-v1:0", @@ -309,6 +333,8 @@ impl Model { Self::ClaudeOpus4_1 => "Claude Opus 4.1", Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking", Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking", + Self::ClaudeOpus4_5 => "Claude Opus 4.5", + Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking", Self::Claude3_5SonnetV2 => "Claude 3.5 Sonnet v2", Self::Claude3_5Sonnet => "Claude 3.5 Sonnet", Self::Claude3Opus => "Claude 3 Opus", @@ -379,7 +405,9 @@ impl Model { | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::ClaudeOpus4Thinking - | Self::ClaudeOpus4_1Thinking => 200_000, + | Self::ClaudeOpus4_1Thinking + | 
Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking => 200_000, Self::AmazonNovaPremier => 1_000_000, Self::PalmyraWriterX5 => 1_000_000, Self::PalmyraWriterX4 => 128_000, @@ -393,7 +421,11 @@ impl Model { Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096, Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000, Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000, - Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::ClaudeHaiku4_5 => 64_000, + Self::ClaudeSonnet4_5 + | Self::ClaudeSonnet4_5Thinking + | Self::ClaudeHaiku4_5 + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking => 64_000, Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 @@ -418,6 +450,8 @@ impl Model { | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -443,6 +477,8 @@ impl Model { | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking | Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5 @@ -484,7 +520,9 @@ impl Model { | Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 - | Self::ClaudeOpus4_1Thinking => true, + | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking => true, // Custom models - check if they have cache configuration Self::Custom { @@ -506,7 +544,9 @@ impl Model { | Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking | Self::ClaudeOpus4_1 - | Self::ClaudeOpus4_1Thinking => Some(BedrockModelCacheConfiguration { + | Self::ClaudeOpus4_1Thinking + | Self::ClaudeOpus4_5 + | Self::ClaudeOpus4_5Thinking => Some(BedrockModelCacheConfiguration { max_cache_anchors: 4, min_total_token: 1024, }), @@ -535,50 +575,109 @@ impl Model { budget_tokens: Some(4096), } } - Model::ClaudeOpus4Thinking | Model::ClaudeOpus4_1Thinking => { - BedrockModelMode::Thinking { - budget_tokens: Some(4096), - } - } + Model::ClaudeOpus4Thinking + | Model::ClaudeOpus4_1Thinking + | Model::ClaudeOpus4_5Thinking => BedrockModelMode::Thinking { + budget_tokens: Some(4096), + }, _ => BedrockModelMode::Default, } } - pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result { + pub fn cross_region_inference_id( + &self, + region: &str, + allow_global: bool, + ) -> anyhow::Result { + // List derived from here: + // https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system + let model_id = self.request_id(); + + let supports_global = matches!( + self, + Model::ClaudeOpus4_5 + | Model::ClaudeOpus4_5Thinking + | Model::ClaudeHaiku4_5 + | Model::ClaudeSonnet4 + | Model::ClaudeSonnet4Thinking + | Model::ClaudeSonnet4_5 + | Model::ClaudeSonnet4_5Thinking + ); + let region_group = if region.starts_with("us-gov-") { "us-gov" - } else if region.starts_with("us-") { - "us" + } else if region.starts_with("us-") + || region.starts_with("ca-") + || region.starts_with("sa-") + { + if allow_global && supports_global { + "global" + } else { + "us" + } } else if region.starts_with("eu-") { - "eu" + if allow_global && supports_global { + "global" + } else { + "eu" + } } else if region.starts_with("ap-") || region == "me-central-1" || region == "me-south-1" { - "apac" - } else if region.starts_with("ca-") || region.starts_with("sa-") { - // Canada and South America regions - default to US profiles - "us" + if allow_global 
&& supports_global { + "global" + } else { + "apac" + } } else { anyhow::bail!("Unsupported Region {region}"); }; - let model_id = self.request_id(); + match (self, region_group, region) { + (Model::Custom { .. }, _, _) => Ok(self.request_id().into()), + + ( + Model::ClaudeOpus4_5 + | Model::ClaudeOpus4_5Thinking + | Model::ClaudeHaiku4_5 + | Model::ClaudeSonnet4 + | Model::ClaudeSonnet4Thinking + | Model::ClaudeSonnet4_5 + | Model::ClaudeSonnet4_5Thinking, + "global", + _, + ) => Ok(format!("{}.{}", region_group, model_id)), - match (self, region_group) { - // Custom models can't have CRI IDs - (Model::Custom { .. }, _) => Ok(self.request_id().into()), + ( + Model::Claude3Haiku + | Model::Claude3_5Sonnet + | Model::Claude3_7Sonnet + | Model::Claude3_7SonnetThinking + | Model::ClaudeSonnet4_5 + | Model::ClaudeSonnet4_5Thinking, + "us-gov", + _, + ) => Ok(format!("{}.{}", region_group, model_id)), - // Models with US Gov only - (Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => { - Ok(format!("{}.{}", region_group, model_id)) - } + ( + Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking, + "apac", + "ap-southeast-2" | "ap-southeast-4", + ) => Ok(format!("au.{}", model_id)), - // Available everywhere - (Model::AmazonNovaLite | Model::AmazonNovaMicro | Model::AmazonNovaPro, _) => { - Ok(format!("{}.{}", region_group, model_id)) + ( + Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking, + "apac", + "ap-northeast-1" | "ap-northeast-3", + ) => Ok(format!("jp.{}", model_id)), + + (Model::AmazonNovaLite, "us", r) if r.starts_with("ca-") => { + Ok(format!("ca.{}", model_id)) } - // Models in US ( Model::AmazonNovaPremier + | Model::AmazonNovaLite + | Model::AmazonNovaMicro + | Model::AmazonNovaPro | Model::Claude3_5Haiku | Model::ClaudeHaiku4_5 | Model::Claude3_5Sonnet @@ -593,6 +692,8 @@ impl Model { | Model::ClaudeOpus4Thinking | Model::ClaudeOpus4_1 | Model::ClaudeOpus4_1Thinking + | Model::ClaudeOpus4_5 + | Model::ClaudeOpus4_5Thinking | Model::Claude3Haiku | Model::Claude3Opus | Model::Claude3Sonnet @@ -613,16 +714,18 @@ impl Model { | Model::PalmyraWriterX4 | Model::PalmyraWriterX5, "us", + _, ) => Ok(format!("{}.{}", region_group, model_id)), - // Models available in EU ( - Model::Claude3_5Sonnet + Model::AmazonNovaLite + | Model::AmazonNovaMicro + | Model::AmazonNovaPro + | Model::Claude3_5Sonnet | Model::ClaudeHaiku4_5 | Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking | Model::ClaudeSonnet4 - | Model::ClaudeSonnet4Thinking | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking | Model::Claude3Haiku @@ -631,26 +734,26 @@ impl Model { | Model::MetaLlama323BInstructV1 | Model::MistralPixtralLarge2502V1, "eu", + _, ) => Ok(format!("{}.{}", region_group, model_id)), - // Models available in APAC ( - Model::Claude3_5Sonnet + Model::AmazonNovaLite + | Model::AmazonNovaMicro + | Model::AmazonNovaPro + | Model::Claude3_5Sonnet | Model::Claude3_5SonnetV2 | Model::ClaudeHaiku4_5 - | Model::Claude3Haiku - | Model::Claude3Sonnet | Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking | Model::ClaudeSonnet4 - | Model::ClaudeSonnet4Thinking - | Model::ClaudeSonnet4_5 - | Model::ClaudeSonnet4_5Thinking, + | Model::Claude3Haiku + | Model::Claude3Sonnet, "apac", + _, ) => Ok(format!("{}.{}", region_group, model_id)), - // Any other combination is not supported - _ => Ok(self.request_id().into()), + _ => Ok(model_id.into()), } } } @@ -663,15 +766,15 @@ mod tests { fn test_us_region_inference_ids() -> anyhow::Result<()> 
{ // Test US regions assert_eq!( - Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1")?, + Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1", false)?, "us.anthropic.claude-3-5-sonnet-20241022-v2:0" ); assert_eq!( - Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2")?, + Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2", false)?, "us.anthropic.claude-3-5-sonnet-20241022-v2:0" ); assert_eq!( - Model::AmazonNovaPro.cross_region_inference_id("us-east-2")?, + Model::AmazonNovaPro.cross_region_inference_id("us-east-2", false)?, "us.amazon.nova-pro-v1:0" ); Ok(()) @@ -681,19 +784,19 @@ mod tests { fn test_eu_region_inference_ids() -> anyhow::Result<()> { // Test European regions assert_eq!( - Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?, + Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1", false)?, "eu.anthropic.claude-sonnet-4-20250514-v1:0" ); assert_eq!( - Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?, + Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", false)?, "eu.anthropic.claude-sonnet-4-5-20250929-v1:0" ); assert_eq!( - Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?, + Model::Claude3Sonnet.cross_region_inference_id("eu-west-1", false)?, "eu.anthropic.claude-3-sonnet-20240229-v1:0" ); assert_eq!( - Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1")?, + Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1", false)?, "eu.amazon.nova-micro-v1:0" ); Ok(()) @@ -703,15 +806,15 @@ mod tests { fn test_apac_region_inference_ids() -> anyhow::Result<()> { // Test Asia-Pacific regions assert_eq!( - Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1")?, + Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1", false)?, "apac.anthropic.claude-3-5-sonnet-20241022-v2:0" ); assert_eq!( - Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2")?, + Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2", false)?, "apac.anthropic.claude-3-5-sonnet-20241022-v2:0" ); assert_eq!( - Model::AmazonNovaLite.cross_region_inference_id("ap-south-1")?, + Model::AmazonNovaLite.cross_region_inference_id("ap-south-1", false)?, "apac.amazon.nova-lite-v1:0" ); Ok(()) @@ -721,11 +824,11 @@ mod tests { fn test_gov_region_inference_ids() -> anyhow::Result<()> { // Test Government regions assert_eq!( - Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1")?, + Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1", false)?, "us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0" ); assert_eq!( - Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1")?, + Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1", false)?, "us-gov.anthropic.claude-3-haiku-20240307-v1:0" ); Ok(()) @@ -735,15 +838,15 @@ mod tests { fn test_meta_models_inference_ids() -> anyhow::Result<()> { // Test Meta models assert_eq!( - Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?, + Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1", false)?, "meta.llama3-70b-instruct-v1:0" ); assert_eq!( - Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1")?, + Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1", false)?, "us.meta.llama3-1-70b-instruct-v1:0" ); assert_eq!( - Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?, + Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1", false)?, "eu.meta.llama3-2-1b-instruct-v1:0" ); Ok(()) @@ -754,11 
+857,11 @@ mod tests { // Mistral models don't follow the regional prefix pattern, // so they should return their original IDs assert_eq!( - Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1")?, + Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1", false)?, "mistral.mistral-large-2402-v1:0" ); assert_eq!( - Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1")?, + Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1", false)?, "mistral.mixtral-8x7b-instruct-v0:1" ); Ok(()) @@ -769,11 +872,11 @@ mod tests { // AI21 models don't follow the regional prefix pattern, // so they should return their original IDs assert_eq!( - Model::AI21J2UltraV1.cross_region_inference_id("us-east-1")?, + Model::AI21J2UltraV1.cross_region_inference_id("us-east-1", false)?, "ai21.j2-ultra-v1" ); assert_eq!( - Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1")?, + Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1", false)?, "ai21.jamba-instruct-v1:0" ); Ok(()) @@ -784,11 +887,11 @@ mod tests { // Cohere models don't follow the regional prefix pattern, // so they should return their original IDs assert_eq!( - Model::CohereCommandRV1.cross_region_inference_id("us-east-1")?, + Model::CohereCommandRV1.cross_region_inference_id("us-east-1", false)?, "cohere.command-r-v1:0" ); assert_eq!( - Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1")?, + Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1", false)?, "cohere.command-text-v14:7:4k" ); Ok(()) @@ -808,10 +911,17 @@ mod tests { // Custom model should return its name unchanged assert_eq!( - custom_model.cross_region_inference_id("us-east-1")?, + custom_model.cross_region_inference_id("us-east-1", false)?, "custom.my-model-v1:0" ); + // Test that models without global support fall back to regional when allow_global is true + assert_eq!( + Model::AmazonNovaPro.cross_region_inference_id("us-east-1", true)?, + "us.amazon.nova-pro-v1:0", + "Nova Pro should fall back to regional profile even when allow_global is true" + ); + Ok(()) } @@ -850,3 +960,28 @@ mod tests { ); } } + +#[test] +fn test_global_inference_ids() -> anyhow::Result<()> { + // Test global inference for models that support it when allow_global is true + assert_eq!( + Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", true)?, + "global.anthropic.claude-sonnet-4-20250514-v1:0" + ); + assert_eq!( + Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", true)?, + "global.anthropic.claude-sonnet-4-5-20250929-v1:0" + ); + assert_eq!( + Model::ClaudeHaiku4_5.cross_region_inference_id("ap-south-1", true)?, + "global.anthropic.claude-haiku-4-5-20251001-v1:0" + ); + + // Test that regional prefix is used when allow_global is false + assert_eq!( + Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", false)?, + "us.anthropic.claude-sonnet-4-20250514-v1:0" + ); + + Ok(()) +} diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 7664de3c87673a405118911526cb6606a2fecacf..00c1c0939bbfaf18ea6d0550633f2ae05e16ef25 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -123,7 +123,7 @@ impl Render for Breadcrumbs { .upgrade() .zip(zed_actions::outline::TOGGLE_OUTLINE.get()) { - callback(editor.to_any(), window, cx); + callback(editor.to_any_view(), window, cx); } } }) diff --git a/crates/buffer_diff/Cargo.toml b/crates/buffer_diff/Cargo.toml index 
1be21f3a0f1ef7aafa222a611d858f8adb097454..6249ae418c593f5ae8bca3408d8f5f25df7c871b 100644 --- a/crates/buffer_diff/Cargo.toml +++ b/crates/buffer_diff/Cargo.toml @@ -12,7 +12,7 @@ workspace = true path = "src/buffer_diff.rs" [features] -test-support = [] +test-support = ["settings"] [dependencies] anyhow.workspace = true @@ -24,6 +24,7 @@ language.workspace = true log.workspace = true pretty_assertions.workspace = true rope.workspace = true +settings = { workspace = true, optional = true } sum_tree.workspace = true text.workspace = true util.workspace = true @@ -33,6 +34,7 @@ ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } rand.workspace = true serde_json.workspace = true +settings.workspace = true text = { workspace = true, features = ["test-support"] } unindent.workspace = true zlog.workspace = true diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index d6ae5545200bb47976554814e346be3039fa276e..22525096d3cbca456aa114b5acc9b4239b570dda 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1,7 +1,10 @@ use futures::channel::oneshot; use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel}; -use language::{Language, LanguageRegistry}; +use language::{ + BufferRow, DiffOptions, File, Language, LanguageName, LanguageRegistry, + language_settings::language_settings, word_diff_ranges, +}; use rope::Rope; use std::{ cmp::Ordering, @@ -11,14 +14,16 @@ use std::{ sync::{Arc, LazyLock}, }; use sum_tree::SumTree; -use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _}; +use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _, ToPoint as _}; use util::ResultExt; pub static CALCULATE_DIFF_TASK: LazyLock = LazyLock::new(TaskLabel::new); +pub const MAX_WORD_DIFF_LINE_COUNT: usize = 5; pub struct BufferDiff { pub buffer_id: BufferId, inner: BufferDiffInner, + // diff of the index vs head secondary_diff: Option>, } @@ -31,6 +36,7 @@ pub struct BufferDiffSnapshot { #[derive(Clone)] struct BufferDiffInner { hunks: SumTree, + // Used for making staging mo pending_hunks: SumTree, base_text: language::BufferSnapshot, base_text_exists: bool, } @@ -50,11 +56,18 @@ pub enum DiffHunkStatusKind { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +/// Diff of Working Copy vs Index +/// aka 'is this hunk staged or not' pub enum DiffHunkSecondaryStatus { + /// Unstaged HasSecondaryHunk, + /// Partially staged OverlapsWithSecondaryHunk, + /// Staged NoSecondaryHunk, + /// We are unstaging SecondaryHunkAdditionPending, + /// We are staging SecondaryHunkRemovalPending, } @@ -68,6 +81,10 @@ pub struct DiffHunk { /// The range in the buffer's diff base text to which this hunk corresponds. pub diff_base_byte_range: Range, pub secondary_status: DiffHunkSecondaryStatus, + // Anchors representing the word diff locations in the active buffer + pub buffer_word_diffs: Vec>, + // Offsets relative to the start of the deleted diff that represent word diff locations + pub base_word_diffs: Vec>, } /// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range. 
@@ -75,6 +92,8 @@ pub struct DiffHunk { struct InternalDiffHunk { buffer_range: Range, diff_base_byte_range: Range, + base_word_diffs: Vec>, + buffer_word_diffs: Vec>, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -88,6 +107,7 @@ struct PendingHunk { #[derive(Debug, Clone)] pub struct DiffHunkSummary { buffer_range: Range, + diff_base_byte_range: Range, } impl sum_tree::Item for InternalDiffHunk { @@ -96,6 +116,7 @@ impl sum_tree::Item for InternalDiffHunk { fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), + diff_base_byte_range: self.diff_base_byte_range.clone(), } } } @@ -106,6 +127,7 @@ impl sum_tree::Item for PendingHunk { fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), + diff_base_byte_range: self.diff_base_byte_range.clone(), } } } @@ -116,6 +138,7 @@ impl sum_tree::Summary for DiffHunkSummary { fn zero(_cx: Self::Context<'_>) -> Self { DiffHunkSummary { buffer_range: Anchor::MIN..Anchor::MIN, + diff_base_byte_range: 0..0, } } @@ -125,6 +148,15 @@ impl sum_tree::Summary for DiffHunkSummary { .start .min(&other.buffer_range.start, buffer); self.buffer_range.end = *self.buffer_range.end.max(&other.buffer_range.end, buffer); + + self.diff_base_byte_range.start = self + .diff_base_byte_range + .start + .min(other.diff_base_byte_range.start); + self.diff_base_byte_range.end = self + .diff_base_byte_range + .end + .max(other.diff_base_byte_range.end); } } @@ -147,11 +179,16 @@ impl std::fmt::Debug for BufferDiffInner { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("BufferDiffSnapshot") .field("hunks", &self.hunks) + .field("remote_id", &self.base_text.remote_id()) .finish() } } impl BufferDiffSnapshot { + pub fn buffer_diff_id(&self) -> BufferId { + self.inner.base_text.remote_id() + } + fn empty(buffer: &text::BufferSnapshot, cx: &mut App) -> BufferDiffSnapshot { BufferDiffSnapshot { inner: BufferDiffInner { @@ -190,6 +227,13 @@ impl BufferDiffSnapshot { let base_text_pair; let base_text_exists; let base_text_snapshot; + let diff_options = build_diff_options( + None, + language.as_ref().map(|l| l.name()), + language.as_ref().map(|l| l.default_scope()), + cx, + ); + if let Some(text) = &base_text { let base_text_rope = Rope::from(text.as_str()); base_text_pair = Some((text.clone(), base_text_rope.clone())); @@ -207,7 +251,7 @@ impl BufferDiffSnapshot { .background_executor() .spawn_labeled(*CALCULATE_DIFF_TASK, { let buffer = buffer.clone(); - async move { compute_hunks(base_text_pair, buffer) } + async move { compute_hunks(base_text_pair, buffer, diff_options) } }); async move { @@ -230,6 +274,12 @@ impl BufferDiffSnapshot { base_text_snapshot: language::BufferSnapshot, cx: &App, ) -> impl Future + use<> { + let diff_options = build_diff_options( + base_text_snapshot.file(), + base_text_snapshot.language().map(|l| l.name()), + base_text_snapshot.language().map(|l| l.default_scope()), + cx, + ); let base_text_exists = base_text.is_some(); let base_text_pair = base_text.map(|text| { debug_assert_eq!(&*text, &base_text_snapshot.text()); @@ -241,7 +291,7 @@ impl BufferDiffSnapshot { inner: BufferDiffInner { base_text: base_text_snapshot, pending_hunks: SumTree::new(&buffer), - hunks: compute_hunks(base_text_pair, buffer), + hunks: compute_hunks(base_text_pair, buffer, diff_options), base_text_exists, }, secondary_diff: None, @@ -300,6 +350,54 @@ impl BufferDiffSnapshot { let (new_id, new_empty) = 
(right.remote_id(), right.is_empty()); new_id == old_id || (new_empty && old_empty) } + + pub fn row_to_base_text_row(&self, row: BufferRow, buffer: &text::BufferSnapshot) -> u32 { + // TODO(split-diff) expose a parameter to reuse a cursor to avoid repeatedly seeking from the start + + // Find the last hunk that starts before this position. + let mut cursor = self.inner.hunks.cursor::(buffer); + let position = buffer.anchor_before(Point::new(row, 0)); + cursor.seek(&position, Bias::Left); + if cursor + .item() + .is_none_or(|hunk| hunk.buffer_range.start.cmp(&position, buffer).is_gt()) + { + cursor.prev(); + } + + let unclipped_point = if let Some(hunk) = cursor.item() + && hunk.buffer_range.start.cmp(&position, buffer).is_le() + { + let mut unclipped_point = cursor + .end() + .diff_base_byte_range + .end + .to_point(self.base_text()); + if position.cmp(&cursor.end().buffer_range.end, buffer).is_ge() { + unclipped_point += + Point::new(row, 0) - cursor.end().buffer_range.end.to_point(buffer); + } + // Move the cursor so that at the next step we can clip with the start of the next hunk. + cursor.next(); + unclipped_point + } else { + // Position is before the added region for the first hunk. + debug_assert!(self.inner.hunks.first().is_none_or(|first_hunk| { + position.cmp(&first_hunk.buffer_range.start, buffer).is_le() + })); + Point::new(row, 0) + }; + + let max_point = if let Some(next_hunk) = cursor.item() { + next_hunk + .diff_base_byte_range + .start + .to_point(self.base_text()) + } else { + self.base_text().max_point() + }; + unclipped_point.min(max_point).row + } } impl BufferDiffInner { @@ -339,7 +437,7 @@ impl BufferDiffInner { }; let hunk = PendingHunk { - buffer_range: Anchor::MIN..Anchor::MAX, + buffer_range: Anchor::min_max_range_for_buffer(buffer.remote_id()), diff_base_byte_range: 0..index_text.map_or(0, |rope| rope.len()), buffer_version: buffer.version().clone(), new_status, @@ -536,11 +634,15 @@ impl BufferDiffInner { [ ( &hunk.buffer_range.start, - (hunk.buffer_range.start, hunk.diff_base_byte_range.start), + ( + hunk.buffer_range.start, + hunk.diff_base_byte_range.start, + hunk, + ), ), ( &hunk.buffer_range.end, - (hunk.buffer_range.end, hunk.diff_base_byte_range.end), + (hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk), ), ] }); @@ -559,8 +661,11 @@ impl BufferDiffInner { let mut summaries = buffer.summaries_for_anchors_with_payload::(anchor_iter); iter::from_fn(move || { loop { - let (start_point, (start_anchor, start_base)) = summaries.next()?; - let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?; + let (start_point, (start_anchor, start_base, hunk)) = summaries.next()?; + let (mut end_point, (mut end_anchor, end_base, _)) = summaries.next()?; + + let base_word_diffs = hunk.base_word_diffs.clone(); + let buffer_word_diffs = hunk.buffer_word_diffs.clone(); if !start_anchor.is_valid(buffer) { continue; @@ -630,6 +735,8 @@ impl BufferDiffInner { range: start_point..end_point, diff_base_byte_range: start_base..end_base, buffer_range: start_anchor..end_anchor, + base_word_diffs, + buffer_word_diffs, secondary_status, }); } @@ -661,6 +768,8 @@ impl BufferDiffInner { buffer_range: hunk.buffer_range.clone(), // The secondary status is not used by callers of this method. 
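
The new `row_to_base_text_row` walks the hunk tree to translate a row in the working copy into the corresponding row of the diff base. The mapping can be pictured with plain row ranges (a simplified standalone sketch assuming sorted, non-overlapping hunks; the real method operates on anchors and byte offsets and seeks a sum-tree cursor instead of scanning):

```rust
use std::ops::Range;

/// A hunk in row coordinates: rows replaced in the buffer and the rows they
/// correspond to in the diff base text. (Hypothetical simplified shape.)
struct RowHunk {
    buffer_rows: Range<u32>,
    base_rows: Range<u32>,
}

/// Map a buffer row to the corresponding base-text row.
fn row_to_base_text_row(row: u32, hunks: &[RowHunk]) -> u32 {
    let mut delta: i64 = 0;
    for hunk in hunks {
        if row < hunk.buffer_rows.start {
            break;
        }
        if row < hunk.buffer_rows.end {
            // Inside an added/modified region: clamp to the end of the
            // corresponding base range.
            return hunk.base_rows.end;
        }
        delta += hunk.base_rows.end as i64 - hunk.base_rows.start as i64;
        delta -= hunk.buffer_rows.end as i64 - hunk.buffer_rows.start as i64;
    }
    (row as i64 + delta) as u32
}

fn main() {
    // Hunks mirroring the `test_row_to_base_text_row` case added later in
    // this file (base: zero..eight, buffer: zero, ONE, two, NINE, five, seven).
    let hunks = [
        RowHunk { buffer_rows: 1..2, base_rows: 1..2 },
        RowHunk { buffer_rows: 3..4, base_rows: 3..5 },
        RowHunk { buffer_rows: 5..5, base_rows: 6..7 },
        RowHunk { buffer_rows: 6..6, base_rows: 8..9 },
    ];
    assert_eq!(row_to_base_text_row(0, &hunks), 0);
    assert_eq!(row_to_base_text_row(1, &hunks), 2);
    assert_eq!(row_to_base_text_row(4, &hunks), 5);
    assert_eq!(row_to_base_text_row(6, &hunks), 9);
}
```
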
secondary_status: DiffHunkSecondaryStatus::NoSecondaryHunk, + base_word_diffs: hunk.base_word_diffs.clone(), + buffer_word_diffs: hunk.buffer_word_diffs.clone(), }) }) } @@ -729,9 +838,36 @@ impl BufferDiffInner { } } +fn build_diff_options( + file: Option<&Arc>, + language: Option, + language_scope: Option, + cx: &App, +) -> Option { + #[cfg(any(test, feature = "test-support"))] + { + if !cx.has_global::() { + return Some(DiffOptions { + language_scope, + max_word_diff_line_count: MAX_WORD_DIFF_LINE_COUNT, + ..Default::default() + }); + } + } + + language_settings(language, file, cx) + .word_diff_enabled + .then_some(DiffOptions { + language_scope, + max_word_diff_line_count: MAX_WORD_DIFF_LINE_COUNT, + ..Default::default() + }) +} + fn compute_hunks( diff_base: Option<(Arc, Rope)>, buffer: text::BufferSnapshot, + diff_options: Option, ) -> SumTree { let mut tree = SumTree::new(&buffer); @@ -757,6 +893,8 @@ fn compute_hunks( InternalDiffHunk { buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), diff_base_byte_range: 0..diff_base.len() - 1, + base_word_diffs: Vec::default(), + buffer_word_diffs: Vec::default(), }, &buffer, ); @@ -772,6 +910,7 @@ fn compute_hunks( &diff_base_rope, &buffer, &mut divergence, + diff_options.as_ref(), ); tree.push(hunk, &buffer); } @@ -779,8 +918,10 @@ fn compute_hunks( } else { tree.push( InternalDiffHunk { - buffer_range: Anchor::MIN..Anchor::MAX, + buffer_range: Anchor::min_max_range_for_buffer(buffer.remote_id()), diff_base_byte_range: 0..0, + base_word_diffs: Vec::default(), + buffer_word_diffs: Vec::default(), }, &buffer, ); @@ -795,6 +936,7 @@ fn process_patch_hunk( diff_base: &Rope, buffer: &text::BufferSnapshot, buffer_row_divergence: &mut i64, + diff_options: Option<&DiffOptions>, ) -> InternalDiffHunk { let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); assert!(line_item_count > 0); @@ -859,9 +1001,49 @@ fn process_patch_hunk( let start = Point::new(buffer_row_range.start, 0); let end = Point::new(buffer_row_range.end, 0); let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); + + let base_line_count = line_item_count.saturating_sub(buffer_row_range.len()); + + let (base_word_diffs, buffer_word_diffs) = if let Some(diff_options) = diff_options + && !buffer_row_range.is_empty() + && base_line_count == buffer_row_range.len() + && diff_options.max_word_diff_line_count >= base_line_count + { + let base_text: String = diff_base + .chunks_in_range(diff_base_byte_range.clone()) + .collect(); + + let buffer_text: String = buffer.text_for_range(buffer_range.clone()).collect(); + + let (base_word_diffs, buffer_word_diffs_relative) = word_diff_ranges( + &base_text, + &buffer_text, + DiffOptions { + language_scope: diff_options.language_scope.clone(), + ..*diff_options + }, + ); + + let buffer_start_offset = buffer_range.start.to_offset(buffer); + let buffer_word_diffs = buffer_word_diffs_relative + .into_iter() + .map(|range| { + let start = buffer.anchor_after(buffer_start_offset + range.start); + let end = buffer.anchor_after(buffer_start_offset + range.end); + start..end + }) + .collect(); + + (base_word_diffs, buffer_word_diffs) + } else { + (Vec::default(), Vec::default()) + }; + InternalDiffHunk { buffer_range, diff_base_byte_range, + base_word_diffs, + buffer_word_diffs, } } @@ -940,10 +1122,11 @@ impl BufferDiff { pub fn clear_pending_hunks(&mut self, cx: &mut Context) { if self.secondary_diff.is_some() { self.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary { - buffer_range: 
Anchor::MIN..Anchor::MIN, + buffer_range: Anchor::min_min_range_for_buffer(self.buffer_id), + diff_base_byte_range: 0..0, }); cx.emit(BufferDiffEvent::DiffChanged { - changed_range: Some(Anchor::MIN..Anchor::MAX), + changed_range: Some(Anchor::min_max_range_for_buffer(self.buffer_id)), }); } } @@ -1064,7 +1247,10 @@ impl BufferDiff { { (false, new_state.compare(state, buffer)) } - _ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)), + _ => ( + true, + Some(text::Anchor::min_max_range_for_buffer(self.buffer_id)), + ), }; if let Some(secondary_changed_range) = secondary_diff_change @@ -1125,7 +1311,11 @@ impl BufferDiff { buffer_snapshot: &'a text::BufferSnapshot, cx: &'a App, ) -> impl 'a + Iterator { - self.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer_snapshot, cx) + self.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer_snapshot.remote_id()), + buffer_snapshot, + cx, + ) } pub fn hunks_intersecting_range<'a>( @@ -1221,7 +1411,9 @@ impl BufferDiff { impl DiffHunk { pub fn is_created_file(&self) -> bool { - self.diff_base_byte_range == (0..0) && self.buffer_range == (Anchor::MIN..Anchor::MAX) + self.diff_base_byte_range == (0..0) + && self.buffer_range.start.is_min() + && self.buffer_range.end.is_max() } pub fn status(&self) -> DiffHunkStatus { @@ -1388,7 +1580,10 @@ mod tests { let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer.remote_id()), + &buffer, + ), &buffer, &diff_base, &[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())], @@ -1397,7 +1592,10 @@ mod tests { buffer.edit([(0..0, "point five\n")]); diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer.remote_id()), + &buffer, + ), &buffer, &diff_base, &[ @@ -1408,7 +1606,10 @@ mod tests { diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx)); assert_hunks::<&str, _>( - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer.remote_id()), + &buffer, + ), &buffer, &diff_base, &[], @@ -1482,7 +1683,10 @@ mod tests { ]; assert_hunks( - uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), + uncommitted_diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer.remote_id()), + &buffer, + ), &buffer, &head_text, &expected_hunks, @@ -1541,8 +1745,11 @@ mod tests { }) .await; assert_eq!( - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer) - .count(), + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(buffer.remote_id()), + &buffer + ) + .count(), 8 ); @@ -1948,7 +2155,7 @@ mod tests { let range = diff_1.inner.compare(&empty_diff.inner, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0)); - // Edit does not affect the diff. + // Edit does affects the diff because it recalculates word diffs. 
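
Word diffs are only computed for hunks that replace the same number of lines they delete and that stay within `MAX_WORD_DIFF_LINE_COUNT` lines (see the gating in `process_patch_hunk` above, which also requires word diff to be enabled in the language settings). The condition boils down to a check like this standalone sketch:

```rust
/// Decide whether a hunk qualifies for word-level diffing: a balanced
/// modification (same number of deleted and added lines) that is small enough.
/// Simplified restatement of the gate in `process_patch_hunk`; the real code
/// also requires `DiffOptions` to be present (word diff enabled in settings).
fn should_word_diff(
    deleted_line_count: usize,
    added_line_count: usize,
    max_word_diff_line_count: usize,
) -> bool {
    added_line_count > 0
        && deleted_line_count == added_line_count
        && deleted_line_count <= max_word_diff_line_count
}

fn main() {
    assert!(should_word_diff(1, 1, 5)); // single modified line
    assert!(!should_word_diff(0, 3, 5)); // pure insertion
    assert!(!should_word_diff(6, 6, 5)); // hunk too large
}
```
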
buffer.edit_via_marked_text( &" one @@ -1963,7 +2170,14 @@ mod tests { .unindent(), ); let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); - assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer)); + assert_eq!( + Point::new(4, 0)..Point::new(5, 0), + diff_2 + .inner + .compare(&diff_1.inner, &buffer) + .unwrap() + .to_point(&buffer) + ); // Edit turns a deletion hunk into a modification. buffer.edit_via_marked_text( @@ -2154,8 +2368,12 @@ mod tests { let mut diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx); let mut hunks = diff.update(cx, |diff, cx| { - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx) - .collect::>() + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(diff.buffer_id), + &working_copy, + cx, + ) + .collect::>() }); if hunks.is_empty() { return; @@ -2184,8 +2402,12 @@ mod tests { diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx); let found_hunks = diff.update(cx, |diff, cx| { - diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx) - .collect::>() + diff.hunks_intersecting_range( + Anchor::min_max_range_for_buffer(diff.buffer_id), + &working_copy, + cx, + ) + .collect::>() }); assert_eq!(hunks.len(), found_hunks.len()); @@ -2203,4 +2425,62 @@ mod tests { hunks = found_hunks; } } + + #[gpui::test] + async fn test_row_to_base_text_row(cx: &mut TestAppContext) { + let base_text = " + zero + one + two + three + four + five + six + seven + eight + " + .unindent(); + let buffer_text = " + zero + ONE + two + NINE + five + seven + " + .unindent(); + + // zero + // - one + // + ONE + // two + // - three + // - four + // + NINE + // five + // - six + // seven + // + eight + + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); + let buffer_snapshot = buffer.snapshot(); + let diff = BufferDiffSnapshot::new_sync(buffer_snapshot.clone(), base_text, cx); + let expected_results = [ + // don't format me + (0, 0), + (1, 2), + (2, 2), + (3, 5), + (4, 5), + (5, 7), + (6, 9), + ]; + for (buffer_row, expected) in expected_results { + assert_eq!( + diff.row_to_base_text_row(buffer_row, &buffer_snapshot), + expected, + "{buffer_row}" + ); + } + } } diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index e659d1cf05b228423796d4c48906d568d71770d9..ccc8c067c25a91aa44c01911be89c21f0ea9367c 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -305,6 +305,7 @@ impl Room { pub(crate) fn leave(&mut self, cx: &mut Context) -> Task> { cx.notify(); + self.emit_video_track_unsubscribed_events(cx); self.leave_internal(cx) } @@ -352,6 +353,14 @@ impl Room { self.maintain_connection.take(); } + fn emit_video_track_unsubscribed_events(&self, cx: &mut Context) { + for participant in self.remote_participants.values() { + for sid in participant.video_tracks.keys() { + cx.emit(Event::RemoteVideoTrackUnsubscribed { sid: sid.clone() }); + } + } + } + async fn maintain_connection( this: WeakEntity, client: Arc, @@ -524,6 +533,16 @@ impl Room { self.id } + pub fn room_id(&self) -> impl Future> + 'static { + let room = self.live_kit.as_ref().map(|lk| lk.room.clone()); + async move { + let room = room?; + let sid = room.sid().await; + let name = room.name(); + Some(format!("{} (sid: {sid})", name)) + } + } + pub fn status(&self) -> RoomStatus { self.status } @@ -872,6 +891,9 @@ impl Room { project_id: project.id, }); } + for sid in participant.video_tracks.keys() { + 
cx.emit(Event::RemoteVideoTrackUnsubscribed { sid: sid.clone() }); + } false } }); @@ -1683,7 +1705,9 @@ impl LiveKitRoom { } } +#[derive(Default)] enum LocalTrack { + #[default] None, Pending { publish_id: usize, @@ -1694,12 +1718,6 @@ enum LocalTrack { }, } -impl Default for LocalTrack { - fn default() -> Self { - Self::None - } -} - #[derive(Copy, Clone, PartialEq, Eq)] pub enum RoomStatus { Online, diff --git a/crates/channel/Cargo.toml b/crates/channel/Cargo.toml index 43af27ac8b6f21d4e1e16c9102da3de9c0585db4..a8664da8e93738fc21241e8185a9747bd405f469 100644 --- a/crates/channel/Cargo.toml +++ b/crates/channel/Cargo.toml @@ -37,6 +37,7 @@ collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } +semver.workspace = true settings = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/channel/src/channel_store_tests.rs b/crates/channel/src/channel_store_tests.rs index c3a6e80955605be096ba9b1cdb6975b5ab2ee389..f1f9d23a99f25f14385a061c0732869c21f160f1 100644 --- a/crates/channel/src/channel_store_tests.rs +++ b/crates/channel/src/channel_store_tests.rs @@ -1,7 +1,7 @@ use super::*; use client::{Client, UserStore}; use clock::FakeSystemClock; -use gpui::{App, AppContext as _, Entity, SemanticVersion}; +use gpui::{App, AppContext as _, Entity}; use http_client::FakeHttpClient; use rpc::proto::{self}; use settings::SettingsStore; @@ -236,7 +236,7 @@ fn test_dangling_channel_paths(cx: &mut App) { fn init_test(cx: &mut App) -> Entity { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); let clock = Arc::new(FakeSystemClock::new()); let http = FakeHttpClient::with_404_response(); diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 54f7ec4f5315a6529579353f3aa489925534d4ba..63e99a3ed25fad919e1a86a3a1917e3617ac2737 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -34,6 +34,10 @@ util.workspace = true tempfile.workspace = true rayon.workspace = true +[dev-dependencies] +serde_json.workspace = true +util = { workspace = true, features = ["test-support"] } + [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] exec.workspace = true fork.workspace = true diff --git a/crates/cli/build.rs b/crates/cli/build.rs index 50ef631ebfbdc0628c7eacdac615a5e38811621f..a3c4bc643735d002eb1ce836cda3c86a0b99a5cb 100644 --- a/crates/cli/build.rs +++ b/crates/cli/build.rs @@ -23,4 +23,7 @@ fn main() { println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); } + if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") { + println!("cargo:rustc-env=ZED_BUILD_ID={build_identifier}"); + } } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index a16f5a3bab9849ee93abac4e2eccb602698b65de..e1a7a1481b56633364cb011f46cd55e616244f2c 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -12,7 +12,9 @@ use clap::Parser; use cli::{CliRequest, CliResponse, IpcHandshake, ipc::IpcOneShotServer}; use parking_lot::Mutex; use std::{ - env, fs, io, + env, + ffi::OsStr, + fs, io, path::{Path, PathBuf}, process::ExitStatus, sync::Arc, @@ -30,7 +32,7 @@ struct Detect; trait InstalledApp { fn 
zed_version_string(&self) -> String; - fn launch(&self, ipc_url: String) -> anyhow::Result<()>; + fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()>; fn run_foreground( &self, ipc_url: String, @@ -59,6 +61,8 @@ Examples: )] struct Args { /// Wait for all of the given paths to be opened/closed before exiting. + /// + /// When opening a directory, waits until the created window is closed. #[arg(short, long)] wait: bool, /// Add files to the currently open workspace @@ -129,37 +133,177 @@ struct Args { askpass: Option, } +/// Parses a path containing a position (e.g. `path:line:column`) +/// and returns its canonicalized string representation. +/// +/// If a part of path doesn't exist, it will canonicalize the +/// existing part and append the non-existing part. +/// +/// This method must return an absolute path, as many zed +/// crates assume absolute paths. fn parse_path_with_position(argument_str: &str) -> anyhow::Result { - let canonicalized = match Path::new(argument_str).canonicalize() { - Ok(existing_path) => PathWithPosition::from_path(existing_path), - Err(_) => { - let path = PathWithPosition::parse_str(argument_str); + match Path::new(argument_str).canonicalize() { + Ok(existing_path) => Ok(PathWithPosition::from_path(existing_path)), + Err(_) => PathWithPosition::parse_str(argument_str).map_path(|mut path| { let curdir = env::current_dir().context("retrieving current directory")?; - path.map_path(|path| match fs::canonicalize(&path) { - Ok(path) => Ok(path), - Err(e) => { - if let Some(mut parent) = path.parent() { - if parent == Path::new("") { - parent = &curdir - } - match fs::canonicalize(parent) { - Ok(parent) => Ok(parent.join(path.file_name().unwrap())), - Err(_) => Err(e), - } - } else { - Err(e) - } + let mut children = Vec::new(); + let root; + loop { + // canonicalize handles './', and '/'. + if let Ok(canonicalized) = fs::canonicalize(&path) { + root = canonicalized; + break; } - }) - } - .with_context(|| format!("parsing as path with position {argument_str}"))?, - }; - Ok(canonicalized.to_string(|path| path.to_string_lossy().into_owned())) + // The comparison to `curdir` is just a shortcut + // since we know it is canonical. The other one + // is if `argument_str` is a string that starts + // with a name (e.g. "foo/bar"). + if path == curdir || path == Path::new("") { + root = curdir; + break; + } + children.push( + path.file_name() + .with_context(|| format!("parsing as path with position {argument_str}"))? + .to_owned(), + ); + if !path.pop() { + unreachable!("parsing as path with position {argument_str}"); + } + } + Ok(children.iter().rev().fold(root, |mut path, child| { + path.push(child); + path + })) + }), + } + .map(|path_with_pos| path_with_pos.to_string(|path| path.to_string_lossy().into_owned())) +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + use util::path; + use util::paths::SanitizedPath; + use util::test::TempTree; + + macro_rules! 
assert_path_eq { + ($left:expr, $right:expr) => { + assert_eq!( + SanitizedPath::new(Path::new(&$left)), + SanitizedPath::new(Path::new(&$right)) + ) + }; + } + + fn cwd() -> PathBuf { + env::current_dir().unwrap() + } + + static CWD_LOCK: Mutex<()> = Mutex::new(()); + + fn with_cwd(path: &Path, f: impl FnOnce() -> anyhow::Result) -> anyhow::Result { + let _lock = CWD_LOCK.lock(); + let old_cwd = cwd(); + env::set_current_dir(path)?; + let result = f(); + env::set_current_dir(old_cwd)?; + result + } + + #[test] + fn test_parse_non_existing_path() { + // Absolute path + let result = parse_path_with_position(path!("/non/existing/path.txt")).unwrap(); + assert_path_eq!(result, path!("/non/existing/path.txt")); + + // Absolute path in cwd + let path = cwd().join(path!("non/existing/path.txt")); + let expected = path.to_string_lossy().to_string(); + let result = parse_path_with_position(&expected).unwrap(); + assert_path_eq!(result, expected); + + // Relative path + let result = parse_path_with_position(path!("non/existing/path.txt")).unwrap(); + assert_path_eq!(result, expected) + } + + #[test] + fn test_parse_existing_path() { + let temp_tree = TempTree::new(json!({ + "file.txt": "", + })); + let file_path = temp_tree.path().join("file.txt"); + let expected = file_path.to_string_lossy().to_string(); + + // Absolute path + let result = parse_path_with_position(file_path.to_str().unwrap()).unwrap(); + assert_path_eq!(result, expected); + + // Relative path + let result = with_cwd(temp_tree.path(), || parse_path_with_position("file.txt")).unwrap(); + assert_path_eq!(result, expected); + } + + // NOTE: + // While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus + // we assume that they are not supported out of the box. 
+ #[cfg(not(windows))] + #[test] + fn test_parse_symlink_file() { + let temp_tree = TempTree::new(json!({ + "target.txt": "", + })); + let target_path = temp_tree.path().join("target.txt"); + let symlink_path = temp_tree.path().join("symlink.txt"); + std::os::unix::fs::symlink(&target_path, &symlink_path).unwrap(); + + // Absolute path + let result = parse_path_with_position(symlink_path.to_str().unwrap()).unwrap(); + assert_eq!(result, target_path.to_string_lossy()); + + // Relative path + let result = + with_cwd(temp_tree.path(), || parse_path_with_position("symlink.txt")).unwrap(); + assert_eq!(result, target_path.to_string_lossy()); + } + + #[cfg(not(windows))] + #[test] + fn test_parse_symlink_dir() { + let temp_tree = TempTree::new(json!({ + "some": { + "dir": { // symlink target + "ec": { + "tory": { + "file.txt": "", + }}}}})); + + let target_file_path = temp_tree.path().join("some/dir/ec/tory/file.txt"); + let expected = target_file_path.to_string_lossy(); + + let dir_path = temp_tree.path().join("some/dir"); + let symlink_path = temp_tree.path().join("symlink"); + std::os::unix::fs::symlink(&dir_path, &symlink_path).unwrap(); + + // Absolute path + let result = + parse_path_with_position(symlink_path.join("ec/tory/file.txt").to_str().unwrap()) + .unwrap(); + assert_eq!(result, expected); + + // Relative path + let result = with_cwd(temp_tree.path(), || { + parse_path_with_position("symlink/ec/tory/file.txt") + }) + .unwrap(); + assert_eq!(result, expected); + } } fn parse_path_in_wsl(source: &str, wsl: &str) -> Result { let mut source = PathWithPosition::parse_str(source); - let mut command = util::command::new_std_command("wsl.exe"); let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { if user.is_empty() { @@ -170,22 +314,35 @@ fn parse_path_in_wsl(source: &str, wsl: &str) -> Result { (None, wsl) }; + let mut args = vec!["--distribution", distro_name]; if let Some(user) = user { - command.arg("--user").arg(user); + args.push("--user"); + args.push(user); } - let output = command - .arg("--distribution") - .arg(distro_name) + let command = [ + OsStr::new("realpath"), + OsStr::new("-s"), + source.path.as_ref(), + ]; + + let output = util::command::new_std_command("wsl.exe") + .args(&args) .arg("--exec") - .arg("wslpath") - .arg("-m") - .arg(&source.path) + .args(&command) .output()?; + let result = if output.status.success() { + String::from_utf8_lossy(&output.stdout).to_string() + } else { + let fallback = util::command::new_std_command("wsl.exe") + .args(&args) + .arg("--") + .args(&command) + .output()?; + String::from_utf8_lossy(&fallback.stdout).to_string() + }; - let result = String::from_utf8_lossy(&output.stdout); - let prefix = format!("//wsl.localhost/{}", distro_name); - source.path = Path::new(result.trim().strip_prefix(&prefix).unwrap_or(&result)).to_owned(); + source.path = Path::new(result.trim()).to_owned(); Ok(source.to_string(|path| path.to_string_lossy().into_owned())) } @@ -433,7 +590,7 @@ fn main() -> Result<()> { if args.foreground { app.run_foreground(url, user_data_dir.as_deref())?; } else { - app.launch(url)?; + app.launch(url, user_data_dir.as_deref())?; sender.join().unwrap()?; if let Some(handle) = stdin_pipe_handle { handle.join().unwrap()?; @@ -554,14 +711,18 @@ mod linux { ) } - fn launch(&self, ipc_url: String) -> anyhow::Result<()> { - let sock_path = paths::data_dir().join(format!( + fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> { + let data_dir = user_data_dir + .map(PathBuf::from) + 
.unwrap_or_else(|| paths::data_dir().clone()); + + let sock_path = data_dir.join(format!( "zed-{}.sock", *release_channel::RELEASE_CHANNEL_NAME )); let sock = UnixDatagram::unbound()?; if sock.connect(&sock_path).is_err() { - self.boot_background(ipc_url)?; + self.boot_background(ipc_url, user_data_dir)?; } else { sock.send(ipc_url.as_bytes())?; } @@ -587,7 +748,11 @@ mod linux { } impl App { - fn boot_background(&self, ipc_url: String) -> anyhow::Result<()> { + fn boot_background( + &self, + ipc_url: String, + user_data_dir: Option<&str>, + ) -> anyhow::Result<()> { let path = &self.0; match fork::fork() { @@ -601,8 +766,13 @@ mod linux { if fork::close_fd().is_err() { eprintln!("failed to close_fd: {}", std::io::Error::last_os_error()); } - let error = - exec::execvp(path.clone(), &[path.as_os_str(), &OsString::from(ipc_url)]); + let mut args: Vec = + vec![path.as_os_str().to_owned(), OsString::from(ipc_url)]; + if let Some(dir) = user_data_dir { + args.push(OsString::from("--user-data-dir")); + args.push(OsString::from(dir)); + } + let error = exec::execvp(path.clone(), &args); // if exec succeeded, we never get here. eprintln!("failed to exec {:?}: {}", path, error); process::exit(1) @@ -788,11 +958,14 @@ mod windows { ) } - fn launch(&self, ipc_url: String) -> anyhow::Result<()> { + fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> { if check_single_instance() { - std::process::Command::new(self.0.clone()) - .arg(ipc_url) - .spawn()?; + let mut cmd = std::process::Command::new(self.0.clone()); + cmd.arg(ipc_url); + if let Some(dir) = user_data_dir { + cmd.arg("--user-data-dir").arg(dir); + } + cmd.spawn()?; } else { unsafe { let pipe = CreateFileW( @@ -941,7 +1114,7 @@ mod mac_os { format!("Zed {} – {}", self.version(), self.path().display(),) } - fn launch(&self, url: String) -> anyhow::Result<()> { + fn launch(&self, url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> { match self { Self::App { app_bundle, .. 
} => { let app_path = app_bundle; @@ -991,8 +1164,11 @@ mod mac_os { format!("Cloning descriptor for file {subprocess_stdout_file:?}") })?; let mut command = std::process::Command::new(executable); - let command = command - .env(FORCE_CLI_MODE_ENV_VAR_NAME, "") + command.env(FORCE_CLI_MODE_ENV_VAR_NAME, ""); + if let Some(dir) = user_data_dir { + command.arg("--user-data-dir").arg(dir); + } + command .stderr(subprocess_stdout_file) .stdout(subprocess_stdin_file) .arg(url); diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 513a73be4581f3b0c8069dde831cc6811f5e045b..50cf12b977a62d56bf9d4a036165917a5dfff2fc 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -53,7 +53,7 @@ text.workspace = true thiserror.workspace = true time.workspace = true tiny_http.workspace = true -tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] } +tokio-socks.workspace = true tokio.workspace = true url.workspace = true util.workspace = true @@ -70,6 +70,7 @@ settings = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } [target.'cfg(target_os = "windows")'.dependencies] +semver.workspace = true windows.workspace = true [target.'cfg(target_os = "macos")'.dependencies] diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 96b15dc9fb13deea3cdc706f1927c4d6f016b57a..801c8c3de8d3f02e3d73809df2c651c6973f231a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -150,9 +150,8 @@ pub fn init(client: &Arc, cx: &mut App) { .detach_and_log_err(cx); } } - }); - - cx.on_action({ + }) + .on_action({ let client = client.clone(); move |_: &SignOut, cx| { if let Some(client) = client.upgrade() { @@ -162,9 +161,8 @@ pub fn init(client: &Arc, cx: &mut App) { .detach(); } } - }); - - cx.on_action({ + }) + .on_action({ let client = client; move |_: &Reconnect, cx| { if let Some(client) = client.upgrade() { @@ -1723,28 +1721,68 @@ impl ProtoClient for Client { fn is_via_collab(&self) -> bool { true } + + fn has_wsl_interop(&self) -> bool { + false + } } /// prefix for the zed:// url scheme pub const ZED_URL_SCHEME: &str = "zed"; +/// A parsed Zed link that can be handled internally by the application. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ZedLink { + /// Join a channel: `zed.dev/channel/channel-name-123` or `zed://channel/channel-name-123` + Channel { channel_id: u64 }, + /// Open channel notes: `zed.dev/channel/channel-name-123/notes` or with heading `notes#heading` + ChannelNotes { + channel_id: u64, + heading: Option, + }, +} + /// Parses the given link into a Zed link. /// -/// Returns a [`Some`] containing the unprefixed link if the link is a Zed link. -/// Returns [`None`] otherwise. -pub fn parse_zed_link<'a>(link: &'a str, cx: &App) -> Option<&'a str> { +/// Returns a [`Some`] containing the parsed link if the link is a recognized Zed link +/// that should be handled internally by the application. +/// Returns [`None`] for links that should be opened in the browser. 
+pub fn parse_zed_link(link: &str, cx: &App) -> Option { let server_url = &ClientSettings::get_global(cx).server_url; - if let Some(stripped) = link + let path = link .strip_prefix(server_url) .and_then(|result| result.strip_prefix('/')) - { - return Some(stripped); + .or_else(|| { + link.strip_prefix(ZED_URL_SCHEME) + .and_then(|result| result.strip_prefix("://")) + })?; + + let mut parts = path.split('/'); + + if parts.next() != Some("channel") { + return None; } - if let Some(stripped) = link - .strip_prefix(ZED_URL_SCHEME) - .and_then(|result| result.strip_prefix("://")) - { - return Some(stripped); + + let slug = parts.next()?; + let id_str = slug.split('-').next_back()?; + let channel_id = id_str.parse::().ok()?; + + let Some(next) = parts.next() else { + return Some(ZedLink::Channel { channel_id }); + }; + + if let Some(heading) = next.strip_prefix("notes#") { + return Some(ZedLink::ChannelNotes { + channel_id, + heading: Some(heading.to_string()), + }); + } + + if next == "notes" { + return Some(ZedLink::ChannelNotes { + channel_id, + heading: None, + }); } None diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 50bd4ace80341cd2616d5bc88d5ab2475e094b8e..68b6c302fb20b1afe78a89dada745538d8150d0d 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -158,7 +158,7 @@ pub fn os_version() -> String { let mut info = unsafe { std::mem::zeroed() }; let status = unsafe { windows::Wdk::System::SystemServices::RtlGetVersion(&mut info) }; if status.is_ok() { - gpui::SemanticVersion::new( + semver::Version::new( info.dwMajorVersion as _, info.dwMinorVersion as _, info.dwBuildNumber as _, @@ -293,10 +293,11 @@ impl Telemetry { } pub fn metrics_enabled(self: &Arc) -> bool { - let state = self.state.lock(); - let enabled = state.settings.metrics; - drop(state); - enabled + self.state.lock().settings.metrics + } + + pub fn diagnostics_enabled(self: &Arc) -> bool { + self.state.lock().settings.diagnostics } pub fn set_authenticated_user_info( diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 525a3e960ce8bc2aede4b0665af23ab3c33cac15..37f0f3ec278d28279e8d75f5c0b64c75f69901bb 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -267,6 +267,7 @@ impl UserStore { Status::SignedOut => { current_user_tx.send(None).await.ok(); this.update(cx, |this, cx| { + this.clear_plan_and_usage(); cx.emit(Event::PrivateUserInfoUpdated); cx.notify(); this.clear_contacts() @@ -779,6 +780,12 @@ impl UserStore { cx.notify(); } + pub fn clear_plan_and_usage(&mut self) { + self.plan_info = None; + self.model_request_usage = None; + self.edit_prediction_usage = None; + } + fn update_authenticated_user( &mut self, response: GetAuthenticatedUserResponse, diff --git a/crates/client/src/zed_urls.rs b/crates/client/src/zed_urls.rs index 7193c099473c95794796c2fc4d3eaaf2f06eb1ac..2fe47251695446b54d6766c9a52bbd2da366d34e 100644 --- a/crates/client/src/zed_urls.rs +++ b/crates/client/src/zed_urls.rs @@ -51,3 +51,19 @@ pub fn external_agents_docs(cx: &App) -> String { server_url = server_url(cx) ) } + +/// Returns the URL to Zed agent servers documentation. +pub fn agent_server_docs(cx: &App) -> String { + format!( + "{server_url}/docs/extensions/agent-servers", + server_url = server_url(cx) + ) +} + +/// Returns the URL to Zed's edit prediction documentation. 
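
`parse_zed_link` (in `client.rs` above) now returns a structured `ZedLink` instead of the stripped URL suffix; the channel id is taken from the last `-`-separated segment of the channel slug. A minimal standalone sketch of just that extraction step (the real function also matches against the configured server URL and the `zed://` scheme):

```rust
/// Extract the numeric channel id from a slug like "channel-name-123".
fn channel_id_from_slug(slug: &str) -> Option<u64> {
    slug.split('-').next_back()?.parse().ok()
}

fn main() {
    assert_eq!(channel_id_from_slug("zed-channel-42"), Some(42));
    assert_eq!(channel_id_from_slug("42"), Some(42));
    assert_eq!(channel_id_from_slug("no-id-here"), None);
}
```
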
+pub fn edit_prediction_docs(cx: &App) -> String { + format!( + "{server_url}/docs/ai/edit-prediction", + server_url = server_url(cx) + ) +} diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index ff8275fe40eae6945691a7b8d315414617be0235..2c5b2649000bb071b9d206d9d2c204f1eea9bda1 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -58,6 +58,9 @@ pub const SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME: &str = /// The name of the header used by the client to indicate that it supports receiving xAI models. pub const CLIENT_SUPPORTS_X_AI_HEADER_NAME: &str = "x-zed-client-supports-x-ai"; +/// The maximum number of edit predictions that can be rejected per request. +pub const MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST: usize = 100; + #[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum UsageLimit { @@ -166,6 +169,17 @@ pub struct PredictEditsBody { /// Info about the git repository state, only present when can_collect_data is true. #[serde(skip_serializing_if = "Option::is_none", default)] pub git_info: Option, + /// The trigger for this request. + #[serde(default)] + pub trigger: PredictEditsRequestTrigger, +} + +#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize)] +pub enum PredictEditsRequestTrigger { + Diagnostics, + Cli, + #[default] + Other, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -192,6 +206,41 @@ pub struct AcceptEditPredictionBody { pub request_id: String, } +#[derive(Debug, Clone, Deserialize)] +pub struct RejectEditPredictionsBody { + pub rejections: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct RejectEditPredictionsBodyRef<'a> { + pub rejections: &'a [EditPredictionRejection], +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct EditPredictionRejection { + pub request_id: String, + #[serde(default)] + pub reason: EditPredictionRejectReason, + pub was_shown: bool, +} + +#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq)] +pub enum EditPredictionRejectReason { + /// New requests were triggered before this one completed + Canceled, + /// No edits returned + Empty, + /// Edits returned, but none remained after interpolation + InterpolatedEmpty, + /// The new prediction was preferred over the current one + Replaced, + /// The current prediction was preferred over the new one + CurrentPreferred, + /// The current prediction was discarded + #[default] + Discarded, +} + #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum CompletionMode { @@ -322,6 +371,8 @@ pub struct LanguageModel { pub supports_images: bool, pub supports_thinking: bool, pub supports_max_mode: bool, + #[serde(default)] + pub supports_streaming_tools: bool, // only used by OpenAI and xAI #[serde(default)] pub supports_parallel_tool_calls: bool, diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 98ca0748934d663d204c64544af8a3e83fcd704d..9e590dc4cf48a82ecdda8b007c38ab15f3b602be 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -3,13 +3,13 @@ use serde::{Deserialize, Serialize}; use std::{ fmt::{Display, Write as _}, ops::{Add, Range, Sub}, - path::{Path, PathBuf}, + path::Path, sync::Arc, }; use strum::EnumIter; use uuid::Uuid; -use 
crate::PredictEditsGitInfo; +use crate::{PredictEditsGitInfo, PredictEditsRequestTrigger}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct PlanContextRetrievalRequest { @@ -17,7 +17,7 @@ pub struct PlanContextRetrievalRequest { pub excerpt_path: Arc, pub excerpt_line_range: Range, pub cursor_file_max_row: Line, - pub events: Vec, + pub events: Vec>, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -31,18 +31,10 @@ pub struct PredictEditsRequest { /// Within `signatures` pub excerpt_parent: Option, #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub included_files: Vec, - #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub signatures: Vec, - #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub referenced_declarations: Vec, - pub events: Vec, + pub related_files: Vec, + pub events: Vec>, #[serde(default)] pub can_collect_data: bool, - #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub diagnostic_groups: Vec, - #[serde(skip_serializing_if = "is_default", default)] - pub diagnostic_groups_truncated: bool, /// Info about the git repository state, only present when can_collect_data is true. #[serde(skip_serializing_if = "Option::is_none", default)] pub git_info: Option, @@ -53,10 +45,12 @@ pub struct PredictEditsRequest { pub prompt_max_bytes: Option, #[serde(default)] pub prompt_format: PromptFormat, + #[serde(default)] + pub trigger: PredictEditsRequestTrigger, } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct IncludedFile { +pub struct RelatedFile { pub path: Arc, pub max_row: Line, pub excerpts: Vec, @@ -70,16 +64,20 @@ pub struct Excerpt { #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)] pub enum PromptFormat { - MarkedExcerpt, - LabeledSections, - NumLinesUniDiff, + /// XML old_tex/new_text OldTextNewText, - /// Prompt format intended for use via zeta_cli + /// Prompt format intended for use via edit_prediction_cli OnlySnippets, + /// One-sentence instructions used in fine-tuned models + Minimal, + /// One-sentence instructions + FIM-like template + MinimalQwen, + /// No instructions, Qwen chat + Seed-Coder 1120 FIM-like template + SeedCoder1120, } impl PromptFormat { - pub const DEFAULT: PromptFormat = PromptFormat::NumLinesUniDiff; + pub const DEFAULT: PromptFormat = PromptFormat::Minimal; } impl Default for PromptFormat { @@ -97,11 +95,11 @@ impl PromptFormat { impl std::fmt::Display for PromptFormat { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - PromptFormat::MarkedExcerpt => write!(f, "Marked Excerpt"), - PromptFormat::LabeledSections => write!(f, "Labeled Sections"), PromptFormat::OnlySnippets => write!(f, "Only Snippets"), - PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"), PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"), + PromptFormat::Minimal => write!(f, "Minimal"), + PromptFormat::MinimalQwen => write!(f, "Minimal + Qwen FIM"), + PromptFormat::SeedCoder1120 => write!(f, "Seed-Coder 1120"), } } } @@ -111,10 +109,11 @@ impl std::fmt::Display for PromptFormat { #[serde(tag = "event")] pub enum Event { BufferChange { - path: Option, - old_path: Option, + path: Arc, + old_path: Arc, diff: String, predicted: bool, + in_open_source_repo: bool, }, } @@ -126,23 +125,21 @@ impl Display for Event { old_path, diff, predicted, + .. 
} => { - let new_path = path.as_deref().unwrap_or(Path::new("untitled")); - let old_path = old_path.as_deref().unwrap_or(new_path); - if *predicted { write!( f, "// User accepted prediction:\n--- a/{}\n+++ b/{}\n{diff}", DiffPathFmt(old_path), - DiffPathFmt(new_path) + DiffPathFmt(path) ) } else { write!( f, "--- a/{}\n+++ b/{}\n{diff}", DiffPathFmt(old_path), - DiffPathFmt(new_path) + DiffPathFmt(path) ) } } @@ -168,67 +165,6 @@ impl<'a> std::fmt::Display for DiffPathFmt<'a> { } } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Signature { - pub text: String, - pub text_is_truncated: bool, - #[serde(skip_serializing_if = "Option::is_none", default)] - pub parent_index: Option, - /// Range of `text` within the file, possibly truncated according to `text_is_truncated`. The - /// file is implicitly the file that contains the descendant declaration or excerpt. - pub range: Range, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ReferencedDeclaration { - pub path: Arc, - pub text: String, - pub text_is_truncated: bool, - /// Range of `text` within file, possibly truncated according to `text_is_truncated` - pub range: Range, - /// Range within `text` - pub signature_range: Range, - /// Index within `signatures`. - #[serde(skip_serializing_if = "Option::is_none", default)] - pub parent_index: Option, - pub score_components: DeclarationScoreComponents, - pub signature_score: f32, - pub declaration_score: f32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DeclarationScoreComponents { - pub is_same_file: bool, - pub is_referenced_nearby: bool, - pub is_referenced_in_breadcrumb: bool, - pub reference_count: usize, - pub same_file_declaration_count: usize, - pub declaration_count: usize, - pub reference_line_distance: u32, - pub declaration_line_distance: u32, - pub excerpt_vs_item_jaccard: f32, - pub excerpt_vs_signature_jaccard: f32, - pub adjacent_vs_item_jaccard: f32, - pub adjacent_vs_signature_jaccard: f32, - pub excerpt_vs_item_weighted_overlap: f32, - pub excerpt_vs_signature_weighted_overlap: f32, - pub adjacent_vs_item_weighted_overlap: f32, - pub adjacent_vs_signature_weighted_overlap: f32, - pub path_import_match_count: usize, - pub wildcard_path_import_match_count: usize, - pub import_similarity: f32, - pub max_import_similarity: f32, - pub normalized_import_similarity: f32, - pub wildcard_import_similarity: f32, - pub normalized_wildcard_import_similarity: f32, - pub included_by_others: usize, - pub includes_others: usize, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(transparent)] -pub struct DiagnosticGroup(pub Box); - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct PredictEditsResponse { pub request_id: Uuid, @@ -252,10 +188,6 @@ pub struct Edit { pub content: String, } -fn is_default(value: &T) -> bool { - *value == T::default() -} - #[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)] pub struct Point { pub line: Line, @@ -291,10 +223,11 @@ mod tests { #[test] fn test_event_display() { let ev = Event::BufferChange { - path: None, - old_path: None, + path: Path::new("untitled").into(), + old_path: Path::new("untitled").into(), diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), predicted: false, + in_open_source_repo: true, }; assert_eq!( ev.to_string(), @@ -308,10 +241,11 @@ mod tests { ); let ev = Event::BufferChange { - path: Some(PathBuf::from("foo/bar.txt")), - old_path: Some(PathBuf::from("foo/bar.txt")), + path: Path::new("foo/bar.txt").into(), + old_path: 
Path::new("foo/bar.txt").into(), diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), predicted: false, + in_open_source_repo: true, }; assert_eq!( ev.to_string(), @@ -325,10 +259,11 @@ mod tests { ); let ev = Event::BufferChange { - path: Some(PathBuf::from("abc.txt")), - old_path: Some(PathBuf::from("123.txt")), + path: Path::new("abc.txt").into(), + old_path: Path::new("123.txt").into(), diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), predicted: false, + in_open_source_repo: true, }; assert_eq!( ev.to_string(), @@ -342,10 +277,11 @@ mod tests { ); let ev = Event::BufferChange { - path: Some(PathBuf::from("abc.txt")), - old_path: Some(PathBuf::from("123.txt")), + path: Path::new("abc.txt").into(), + old_path: Path::new("123.txt").into(), diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), predicted: true, + in_open_source_repo: true, }; assert_eq!( ev.to_string(), diff --git a/crates/cloud_zeta2_prompt/Cargo.toml b/crates/cloud_zeta2_prompt/Cargo.toml deleted file mode 100644 index 8be10265cb23e7dd0983c52e7c2d6984b62c4be4..0000000000000000000000000000000000000000 --- a/crates/cloud_zeta2_prompt/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "cloud_zeta2_prompt" -version = "0.1.0" -publish.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/cloud_zeta2_prompt.rs" - -[dependencies] -anyhow.workspace = true -cloud_llm_client.workspace = true -indoc.workspace = true -ordered-float.workspace = true -rustc-hash.workspace = true -schemars.workspace = true -serde.workspace = true -strum.workspace = true diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs deleted file mode 100644 index 3f0bd476c50b9e6f92a9f457af15899fcb33b8ed..0000000000000000000000000000000000000000 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ /dev/null @@ -1,792 +0,0 @@ -//! Zeta2 prompt planning and generation code shared with cloud. -pub mod retrieval_prompt; - -use anyhow::{Context as _, Result, anyhow}; -use cloud_llm_client::predict_edits_v3::{ - self, DiffPathFmt, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration, -}; -use indoc::indoc; -use ordered_float::OrderedFloat; -use rustc_hash::{FxHashMap, FxHashSet}; -use serde::Serialize; -use std::cmp; -use std::fmt::Write; -use std::sync::Arc; -use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path}; -use strum::{EnumIter, IntoEnumIterator}; - -pub const DEFAULT_MAX_PROMPT_BYTES: usize = 10 * 1024; - -pub const CURSOR_MARKER: &str = "<|user_cursor|>"; -/// NOTE: Differs from zed version of constant - includes a newline -pub const EDITABLE_REGION_START_MARKER_WITH_NEWLINE: &str = "<|editable_region_start|>\n"; -/// NOTE: Differs from zed version of constant - includes a newline -pub const EDITABLE_REGION_END_MARKER_WITH_NEWLINE: &str = "<|editable_region_end|>\n"; - -// TODO: use constants for markers? -const MARKED_EXCERPT_INSTRUCTIONS: &str = indoc! {" - You are a code completion assistant and your task is to analyze user edits and then rewrite an excerpt that the user provides, suggesting the appropriate edits within the excerpt, taking into account the cursor location. - - The excerpt to edit will be wrapped in markers <|editable_region_start|> and <|editable_region_end|>. The cursor position is marked with <|user_cursor|>. Please respond with edited code for that region. - - Other code is provided for context, and `…` indicates when code has been skipped. 
- - # Edit History: - -"}; - -const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#" - You are a code completion assistant and your task is to analyze user edits, and suggest an edit to one of the provided sections of code. - - Sections of code are grouped by file and then labeled by `<|section_N|>` (e.g `<|section_8|>`). - - The cursor position is marked with `<|user_cursor|>` and it will appear within a special section labeled `<|current_section|>`. Prefer editing the current section until no more changes are needed within it. - - Respond ONLY with the name of the section to edit on a single line, followed by all of the code that should replace that section. For example: - - <|current_section|> - for i in 0..16 { - println!("{i}"); - } - - # Edit History: - -"#}; - -const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#" - # Instructions - - You are an edit prediction agent in a code editor. - Your job is to predict the next edit that the user will make, - based on their last few edits and their current cursor location. - - ## Output Format - - You must briefly explain your understanding of the user's goal, in one - or two sentences, and then specify their next edit in the form of a - unified diff, like this: - - ``` - --- a/src/myapp/cli.py - +++ b/src/myapp/cli.py - @@ ... @@ - import os - import time - import sys - +from constants import LOG_LEVEL_WARNING - @@ ... @@ - config.headless() - config.set_interactive(false) - -config.set_log_level(LOG_L) - +config.set_log_level(LOG_LEVEL_WARNING) - config.set_use_color(True) - ``` - - ## Edit History - -"#}; - -const UNIFIED_DIFF_REMINDER: &str = indoc! {" - --- - - Analyze the edit history and the files, then provide the unified diff for your predicted edits. - Do not include the cursor marker in your output. - Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`). - Do not include line numbers in the hunk headers, use `@@ ... @@`. - Removed lines begin with `-`. - Added lines begin with `+`. - Context lines begin with an extra space. - Context and removed lines are used to match the target edit location, so make sure to include enough of them - to uniquely identify it amongst all excerpts of code provided. -"}; - -const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#" - # Instructions - - You are an edit prediction agent in a code editor. - Your job is to predict the next edit that the user will make, - based on their last few edits and their current cursor location. - - # Output Format - - You must briefly explain your understanding of the user's goal, in one - or two sentences, and then specify their next edit, using the following - XML format: - - - - OLD TEXT 1 HERE - - - NEW TEXT 1 HERE - - - - OLD TEXT 1 HERE - - - NEW TEXT 1 HERE - - - - - Specify the file to edit using the `path` attribute. - - Use `` and `` tags to replace content - - `` must exactly match existing file content, including indentation - - `` cannot be empty - - Do not escape quotes, newlines, or other characters within tags - - Always close all tags properly - - Don't include the <|user_cursor|> marker in your output. - - # Edit History: - -"#}; - -const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#" - --- - - Remember that the edits in the edit history have already been deployed. - The files are currently as shown in the Code Excerpts section. 
-"#}; - -pub fn build_prompt( - request: &predict_edits_v3::PredictEditsRequest, -) -> Result<(String, SectionLabels)> { - let mut insertions = match request.prompt_format { - PromptFormat::MarkedExcerpt => vec![ - ( - Point { - line: request.excerpt_line_range.start, - column: 0, - }, - EDITABLE_REGION_START_MARKER_WITH_NEWLINE, - ), - (request.cursor_point, CURSOR_MARKER), - ( - Point { - line: request.excerpt_line_range.end, - column: 0, - }, - EDITABLE_REGION_END_MARKER_WITH_NEWLINE, - ), - ], - PromptFormat::LabeledSections - | PromptFormat::NumLinesUniDiff - | PromptFormat::OldTextNewText => { - vec![(request.cursor_point, CURSOR_MARKER)] - } - PromptFormat::OnlySnippets => vec![], - }; - - let mut prompt = match request.prompt_format { - PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), - PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), - PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), - PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(), - PromptFormat::OnlySnippets => String::new(), - }; - - if request.events.is_empty() { - prompt.push_str("(No edit history)\n\n"); - } else { - prompt.push_str("Here are the latest edits made by the user, from earlier to later.\n\n"); - push_events(&mut prompt, &request.events); - } - - prompt.push_str(indoc! {" - # Code Excerpts - - The cursor marker <|user_cursor|> indicates the current user cursor position. - The file is in current state, edits from edit history have been applied. - "}); - - if request.prompt_format == PromptFormat::NumLinesUniDiff { - prompt.push_str(indoc! {" - We prepend line numbers (e.g., `123|`); they are not part of the file. - "}); - } - - prompt.push('\n'); - - let mut section_labels = Default::default(); - - if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() { - let syntax_based_prompt = SyntaxBasedPrompt::populate(request)?; - section_labels = syntax_based_prompt.write(&mut insertions, &mut prompt)?; - } else { - if request.prompt_format == PromptFormat::LabeledSections { - anyhow::bail!("PromptFormat::LabeledSections cannot be used with ContextMode::Llm"); - } - - for related_file in &request.included_files { - write_codeblock( - &related_file.path, - &related_file.excerpts, - if related_file.path == request.excerpt_path { - &insertions - } else { - &[] - }, - related_file.max_row, - request.prompt_format == PromptFormat::NumLinesUniDiff, - &mut prompt, - ); - } - } - - match request.prompt_format { - PromptFormat::NumLinesUniDiff => { - prompt.push_str(UNIFIED_DIFF_REMINDER); - } - PromptFormat::OldTextNewText => { - prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER); - } - _ => {} - } - - Ok((prompt, section_labels)) -} - -pub fn write_codeblock<'a>( - path: &Path, - excerpts: impl IntoIterator, - sorted_insertions: &[(Point, &str)], - file_line_count: Line, - include_line_numbers: bool, - output: &'a mut String, -) { - writeln!(output, "`````{}", DiffPathFmt(path)).unwrap(); - write_excerpts( - excerpts, - sorted_insertions, - file_line_count, - include_line_numbers, - output, - ); - write!(output, "`````\n\n").unwrap(); -} - -pub fn write_excerpts<'a>( - excerpts: impl IntoIterator, - sorted_insertions: &[(Point, &str)], - file_line_count: Line, - include_line_numbers: bool, - output: &mut String, -) { - let mut current_row = Line(0); - let mut sorted_insertions = sorted_insertions.iter().peekable(); - - for excerpt in excerpts { - if excerpt.start_line > current_row { - writeln!(output, "…").unwrap(); 
- } - if excerpt.text.is_empty() { - return; - } - - current_row = excerpt.start_line; - - for mut line in excerpt.text.lines() { - if include_line_numbers { - write!(output, "{}|", current_row.0 + 1).unwrap(); - } - - while let Some((insertion_location, insertion_marker)) = sorted_insertions.peek() { - match current_row.cmp(&insertion_location.line) { - cmp::Ordering::Equal => { - let (prefix, suffix) = line.split_at(insertion_location.column as usize); - output.push_str(prefix); - output.push_str(insertion_marker); - line = suffix; - sorted_insertions.next(); - } - cmp::Ordering::Less => break, - cmp::Ordering::Greater => { - sorted_insertions.next(); - break; - } - } - } - output.push_str(line); - output.push('\n'); - current_row.0 += 1; - } - } - - if current_row < file_line_count { - writeln!(output, "…").unwrap(); - } -} - -pub fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) { - if events.is_empty() { - return; - }; - - writeln!(output, "`````diff").unwrap(); - for event in events { - writeln!(output, "{}", event).unwrap(); - } - writeln!(output, "`````\n").unwrap(); -} - -pub struct SyntaxBasedPrompt<'a> { - request: &'a predict_edits_v3::PredictEditsRequest, - /// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in - /// `to_prompt_string`. - snippets: Vec>, - budget_used: usize, -} - -#[derive(Clone, Debug)] -pub struct PlannedSnippet<'a> { - path: Arc, - range: Range, - text: &'a str, - // TODO: Indicate this in the output - #[allow(dead_code)] - text_is_truncated: bool, -} - -#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] -pub enum DeclarationStyle { - Signature, - Declaration, -} - -#[derive(Default, Clone, Debug, Serialize)] -pub struct SectionLabels { - pub excerpt_index: usize, - pub section_ranges: Vec<(Arc, Range)>, -} - -impl<'a> SyntaxBasedPrompt<'a> { - /// Greedy one-pass knapsack algorithm to populate the prompt plan. Does the following: - /// - /// Initializes a priority queue by populating it with each snippet, finding the - /// DeclarationStyle that minimizes `score_density = score / snippet.range(style).len()`. When a - /// "signature" snippet is popped, insert an entry for the "declaration" variant that reflects - /// the cost of upgrade. - /// - /// TODO: Implement an early halting condition. One option might be to have another priority - /// queue where the score is the size, and update it accordingly. Another option might be to - /// have some simpler heuristic like bailing after N failed insertions, or based on how much - /// budget is left. - /// - /// TODO: Has the current known sources of imprecision: - /// - /// * Does not consider snippet overlap when ranking. For example, it might add a field to the - /// plan even though the containing struct is already included. - /// - /// * Does not consider cost of signatures when ranking snippets - this is tricky since - /// signatures may be shared by multiple snippets. - /// - /// * Does not include file paths / other text when considering max_bytes. 
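
For reference, the selection strategy described in the doc comment above (removed along with the rest of the `cloud_zeta2_prompt` crate) is a greedy knapsack over score density. A standalone sketch of the core idea, without the signature-to-declaration "upgrade" entries or parent-signature accounting:

```rust
/// A candidate snippet with an estimated usefulness score and byte size.
struct Candidate {
    name: &'static str,
    score: f64,
    size: usize,
}

/// Greedily pick candidates in order of score density (score / size) until
/// the byte budget is exhausted.
fn select(mut candidates: Vec<Candidate>, budget: usize) -> Vec<&'static str> {
    candidates.sort_by(|a, b| {
        let da = a.score / a.size as f64;
        let db = b.score / b.size as f64;
        db.total_cmp(&da)
    });
    let mut used = 0;
    let mut picked = Vec::new();
    for candidate in candidates {
        if used + candidate.size <= budget {
            used += candidate.size;
            picked.push(candidate.name);
        }
    }
    picked
}

fn main() {
    let picked = select(
        vec![
            Candidate { name: "sig_a", score: 3.0, size: 10 },
            Candidate { name: "decl_a", score: 4.0, size: 40 },
            Candidate { name: "sig_b", score: 1.0, size: 5 },
        ],
        20,
    );
    assert_eq!(picked, ["sig_a", "sig_b"]);
}
```
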
- pub fn populate(request: &'a predict_edits_v3::PredictEditsRequest) -> Result { - let mut this = Self { - request, - snippets: Vec::new(), - budget_used: request.excerpt.len(), - }; - let mut included_parents = FxHashSet::default(); - let additional_parents = this.additional_parent_signatures( - &request.excerpt_path, - request.excerpt_parent, - &included_parents, - )?; - this.add_parents(&mut included_parents, additional_parents); - - let max_bytes = request.prompt_max_bytes.unwrap_or(DEFAULT_MAX_PROMPT_BYTES); - - if this.budget_used > max_bytes { - return Err(anyhow!( - "Excerpt + signatures size of {} already exceeds budget of {}", - this.budget_used, - max_bytes - )); - } - - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] - struct QueueEntry { - score_density: OrderedFloat, - declaration_index: usize, - style: DeclarationStyle, - } - - // Initialize priority queue with the best score for each snippet. - let mut queue: BinaryHeap = BinaryHeap::new(); - for (declaration_index, declaration) in request.referenced_declarations.iter().enumerate() { - let (style, score_density) = DeclarationStyle::iter() - .map(|style| { - ( - style, - OrderedFloat(declaration_score_density(&declaration, style)), - ) - }) - .max_by_key(|(_, score_density)| *score_density) - .unwrap(); - queue.push(QueueEntry { - score_density, - declaration_index, - style, - }); - } - - // Knapsack selection loop - while let Some(queue_entry) = queue.pop() { - let Some(declaration) = request - .referenced_declarations - .get(queue_entry.declaration_index) - else { - return Err(anyhow!( - "Invalid declaration index {}", - queue_entry.declaration_index - )); - }; - - let mut additional_bytes = declaration_size(declaration, queue_entry.style); - if this.budget_used + additional_bytes > max_bytes { - continue; - } - - let additional_parents = this.additional_parent_signatures( - &declaration.path, - declaration.parent_index, - &mut included_parents, - )?; - additional_bytes += additional_parents - .iter() - .map(|(_, snippet)| snippet.text.len()) - .sum::(); - if this.budget_used + additional_bytes > max_bytes { - continue; - } - - this.budget_used += additional_bytes; - this.add_parents(&mut included_parents, additional_parents); - let planned_snippet = match queue_entry.style { - DeclarationStyle::Signature => { - let Some(text) = declaration.text.get(declaration.signature_range.clone()) - else { - return Err(anyhow!( - "Invalid declaration signature_range {:?} with text.len() = {}", - declaration.signature_range, - declaration.text.len() - )); - }; - let signature_start_line = declaration.range.start - + Line( - declaration.text[..declaration.signature_range.start] - .lines() - .count() as u32, - ); - let signature_end_line = signature_start_line - + Line( - declaration.text - [declaration.signature_range.start..declaration.signature_range.end] - .lines() - .count() as u32, - ); - let range = signature_start_line..signature_end_line; - - PlannedSnippet { - path: declaration.path.clone(), - range, - text, - text_is_truncated: declaration.text_is_truncated, - } - } - DeclarationStyle::Declaration => PlannedSnippet { - path: declaration.path.clone(), - range: declaration.range.clone(), - text: &declaration.text, - text_is_truncated: declaration.text_is_truncated, - }, - }; - this.snippets.push(planned_snippet); - - // When a Signature is consumed, insert an entry for Definition style. 
- if queue_entry.style == DeclarationStyle::Signature { - let signature_size = declaration_size(&declaration, DeclarationStyle::Signature); - let declaration_size = - declaration_size(&declaration, DeclarationStyle::Declaration); - let signature_score = declaration_score(&declaration, DeclarationStyle::Signature); - let declaration_score = - declaration_score(&declaration, DeclarationStyle::Declaration); - - let score_diff = declaration_score - signature_score; - let size_diff = declaration_size.saturating_sub(signature_size); - if score_diff > 0.0001 && size_diff > 0 { - queue.push(QueueEntry { - declaration_index: queue_entry.declaration_index, - score_density: OrderedFloat(score_diff / (size_diff as f32)), - style: DeclarationStyle::Declaration, - }); - } - } - } - - anyhow::Ok(this) - } - - fn add_parents( - &mut self, - included_parents: &mut FxHashSet, - snippets: Vec<(usize, PlannedSnippet<'a>)>, - ) { - for (parent_index, snippet) in snippets { - included_parents.insert(parent_index); - self.budget_used += snippet.text.len(); - self.snippets.push(snippet); - } - } - - fn additional_parent_signatures( - &self, - path: &Arc, - parent_index: Option, - included_parents: &FxHashSet, - ) -> Result)>> { - let mut results = Vec::new(); - self.additional_parent_signatures_impl(path, parent_index, included_parents, &mut results)?; - Ok(results) - } - - fn additional_parent_signatures_impl( - &self, - path: &Arc, - parent_index: Option, - included_parents: &FxHashSet, - results: &mut Vec<(usize, PlannedSnippet<'a>)>, - ) -> Result<()> { - let Some(parent_index) = parent_index else { - return Ok(()); - }; - if included_parents.contains(&parent_index) { - return Ok(()); - } - let Some(parent_signature) = self.request.signatures.get(parent_index) else { - return Err(anyhow!("Invalid parent index {}", parent_index)); - }; - results.push(( - parent_index, - PlannedSnippet { - path: path.clone(), - range: parent_signature.range.clone(), - text: &parent_signature.text, - text_is_truncated: parent_signature.text_is_truncated, - }, - )); - self.additional_parent_signatures_impl( - path, - parent_signature.parent_index, - included_parents, - results, - ) - } - - /// Renders the planned context. Each file starts with "```FILE_PATH\n` and ends with triple - /// backticks, with a newline after each file. Outputs a line with "..." between nonconsecutive - /// chunks. 
- pub fn write( - &'a self, - excerpt_file_insertions: &mut Vec<(Point, &'static str)>, - prompt: &mut String, - ) -> Result { - let mut file_to_snippets: FxHashMap<&'a std::path::Path, Vec<&PlannedSnippet<'a>>> = - FxHashMap::default(); - for snippet in &self.snippets { - file_to_snippets - .entry(&snippet.path) - .or_default() - .push(snippet); - } - - // Reorder so that file with cursor comes last - let mut file_snippets = Vec::new(); - let mut excerpt_file_snippets = Vec::new(); - for (file_path, snippets) in file_to_snippets { - if file_path == self.request.excerpt_path.as_ref() { - excerpt_file_snippets = snippets; - } else { - file_snippets.push((file_path, snippets, false)); - } - } - let excerpt_snippet = PlannedSnippet { - path: self.request.excerpt_path.clone(), - range: self.request.excerpt_line_range.clone(), - text: &self.request.excerpt, - text_is_truncated: false, - }; - excerpt_file_snippets.push(&excerpt_snippet); - file_snippets.push((&self.request.excerpt_path, excerpt_file_snippets, true)); - - let section_labels = - self.push_file_snippets(prompt, excerpt_file_insertions, file_snippets)?; - - Ok(section_labels) - } - - fn push_file_snippets( - &self, - output: &mut String, - excerpt_file_insertions: &mut Vec<(Point, &'static str)>, - file_snippets: Vec<(&'a Path, Vec<&'a PlannedSnippet>, bool)>, - ) -> Result { - let mut section_ranges = Vec::new(); - let mut excerpt_index = None; - - for (file_path, mut snippets, is_excerpt_file) in file_snippets { - snippets.sort_by_key(|s| (s.range.start, Reverse(s.range.end))); - - // TODO: What if the snippets get expanded too large to be editable? - let mut current_snippet: Option<(&PlannedSnippet, Range)> = None; - let mut disjoint_snippets: Vec<(&PlannedSnippet, Range)> = Vec::new(); - for snippet in snippets { - if let Some((_, current_snippet_range)) = current_snippet.as_mut() - && snippet.range.start <= current_snippet_range.end - { - current_snippet_range.end = current_snippet_range.end.max(snippet.range.end); - continue; - } - if let Some(current_snippet) = current_snippet.take() { - disjoint_snippets.push(current_snippet); - } - current_snippet = Some((snippet, snippet.range.clone())); - } - if let Some(current_snippet) = current_snippet.take() { - disjoint_snippets.push(current_snippet); - } - - writeln!(output, "`````path={}", file_path.display()).ok(); - let mut skipped_last_snippet = false; - for (snippet, range) in disjoint_snippets { - let section_index = section_ranges.len(); - - match self.request.prompt_format { - PromptFormat::MarkedExcerpt - | PromptFormat::OnlySnippets - | PromptFormat::OldTextNewText - | PromptFormat::NumLinesUniDiff => { - if range.start.0 > 0 && !skipped_last_snippet { - output.push_str("…\n"); - } - } - PromptFormat::LabeledSections => { - if is_excerpt_file - && range.start <= self.request.excerpt_line_range.start - && range.end >= self.request.excerpt_line_range.end - { - writeln!(output, "<|current_section|>").ok(); - } else { - writeln!(output, "<|section_{}|>", section_index).ok(); - } - } - } - - let push_full_snippet = |output: &mut String| { - if self.request.prompt_format == PromptFormat::NumLinesUniDiff { - for (i, line) in snippet.text.lines().enumerate() { - writeln!(output, "{}|{}", i as u32 + range.start.0 + 1, line)?; - } - } else { - output.push_str(&snippet.text); - } - anyhow::Ok(()) - }; - - if is_excerpt_file { - if self.request.prompt_format == PromptFormat::OnlySnippets { - if range.start >= self.request.excerpt_line_range.start - && range.end <= 
self.request.excerpt_line_range.end - { - skipped_last_snippet = true; - } else { - skipped_last_snippet = false; - output.push_str(snippet.text); - } - } else if !excerpt_file_insertions.is_empty() { - let lines = snippet.text.lines().collect::>(); - let push_line = |output: &mut String, line_ix: usize| { - if self.request.prompt_format == PromptFormat::NumLinesUniDiff { - write!(output, "{}|", line_ix as u32 + range.start.0 + 1)?; - } - anyhow::Ok(writeln!(output, "{}", lines[line_ix])?) - }; - let mut last_line_ix = 0; - let mut insertion_ix = 0; - while insertion_ix < excerpt_file_insertions.len() { - let (point, insertion) = &excerpt_file_insertions[insertion_ix]; - let found = point.line >= range.start && point.line <= range.end; - if found { - excerpt_index = Some(section_index); - let insertion_line_ix = (point.line.0 - range.start.0) as usize; - for line_ix in last_line_ix..insertion_line_ix { - push_line(output, line_ix)?; - } - if let Some(next_line) = lines.get(insertion_line_ix) { - if self.request.prompt_format == PromptFormat::NumLinesUniDiff { - write!( - output, - "{}|", - insertion_line_ix as u32 + range.start.0 + 1 - )? - } - output.push_str(&next_line[..point.column as usize]); - output.push_str(insertion); - writeln!(output, "{}", &next_line[point.column as usize..])?; - } else { - writeln!(output, "{}", insertion)?; - } - last_line_ix = insertion_line_ix + 1; - excerpt_file_insertions.remove(insertion_ix); - continue; - } - insertion_ix += 1; - } - skipped_last_snippet = false; - for line_ix in last_line_ix..lines.len() { - push_line(output, line_ix)?; - } - } else { - skipped_last_snippet = false; - push_full_snippet(output)?; - } - } else { - skipped_last_snippet = false; - push_full_snippet(output)?; - } - - section_ranges.push((snippet.path.clone(), range)); - } - - output.push_str("`````\n\n"); - } - - Ok(SectionLabels { - // TODO: Clean this up - excerpt_index: match self.request.prompt_format { - PromptFormat::OnlySnippets => 0, - _ => excerpt_index.context("bug: no snippet found for excerpt")?, - }, - section_ranges, - }) - } -} - -fn declaration_score_density(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 { - declaration_score(declaration, style) / declaration_size(declaration, style) as f32 -} - -fn declaration_score(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 { - match style { - DeclarationStyle::Signature => declaration.signature_score, - DeclarationStyle::Declaration => declaration.declaration_score, - } -} - -fn declaration_size(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> usize { - match style { - DeclarationStyle::Signature => declaration.signature_range.len(), - DeclarationStyle::Declaration => declaration.text.len(), - } -} diff --git a/crates/cloud_zeta2_prompt/src/retrieval_prompt.rs b/crates/cloud_zeta2_prompt/src/retrieval_prompt.rs deleted file mode 100644 index 7fbc3834dfd0f4bbfc4085d696b7fbf755e6dd3d..0000000000000000000000000000000000000000 --- a/crates/cloud_zeta2_prompt/src/retrieval_prompt.rs +++ /dev/null @@ -1,94 +0,0 @@ -use anyhow::Result; -use cloud_llm_client::predict_edits_v3::{self, Excerpt}; -use indoc::indoc; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::fmt::Write; - -use crate::{push_events, write_codeblock}; - -pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> Result { - let mut prompt = SEARCH_INSTRUCTIONS.to_string(); - - if !request.events.is_empty() { - writeln!(&mut prompt, "## User Edits\n")?; - 
push_events(&mut prompt, &request.events); - } - - writeln!(&mut prompt, "## Cursor context")?; - write_codeblock( - &request.excerpt_path, - &[Excerpt { - start_line: request.excerpt_line_range.start, - text: request.excerpt.into(), - }], - &[], - request.cursor_file_max_row, - true, - &mut prompt, - ); - - writeln!(&mut prompt, "{TOOL_USE_REMINDER}")?; - - Ok(prompt) -} - -/// Search for relevant code -/// -/// For the best results, run multiple queries at once with a single invocation of this tool. -#[derive(Clone, Deserialize, Serialize, JsonSchema)] -pub struct SearchToolInput { - /// An array of queries to run for gathering context relevant to the next prediction - #[schemars(length(max = 3))] - pub queries: Box<[SearchToolQuery]>, -} - -/// Search for relevant code by path, syntax hierarchy, and content. -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] -pub struct SearchToolQuery { - /// 1. A glob pattern to match file paths in the codebase to search in. - pub glob: String, - /// 2. Regular expressions to match syntax nodes **by their first line** and hierarchy. - /// - /// Subsequent regexes match nodes within the full content of the nodes matched by the previous regexes. - /// - /// Example: Searching for a `User` class - /// ["class\s+User"] - /// - /// Example: Searching for a `get_full_name` method under a `User` class - /// ["class\s+User", "def\sget_full_name"] - /// - /// Skip this field to match on content alone. - #[schemars(length(max = 3))] - #[serde(default)] - pub syntax_node: Vec, - /// 3. An optional regular expression to match the final content that should appear in the results. - /// - /// - Content will be matched within all lines of the matched syntax nodes. - /// - If syntax node regexes are provided, this field can be skipped to include as much of the node itself as possible. - /// - If no syntax node regexes are provided, the content will be matched within the entire file. - pub content: Option, -} - -pub const TOOL_NAME: &str = "search"; - -const SEARCH_INSTRUCTIONS: &str = indoc! {r#" - You are part of an edit prediction system in a code editor. - Your role is to search for code that will serve as context for predicting the next edit. - - - Analyze the user's recent edits and current cursor context - - Use the `search` tool to find code that is relevant for predicting the next edit - - Focus on finding: - - Code patterns that might need similar changes based on the recent edits - - Functions, variables, types, and constants referenced in the current cursor context - - Related implementations, usages, or dependencies that may require consistent updates - - How items defined in the cursor excerpt are used or altered - - You will not be able to filter results or perform subsequent queries, so keep searches as targeted as possible - - Use `syntax_node` parameter whenever you're looking for a particular type, class, or function - - Avoid using wildcard globs if you already know the file path of the content you're looking for -"#}; - -const TOOL_USE_REMINDER: &str = indoc! {" - -- - Analyze the user's intent in one to two sentences, then call the `search` tool. 
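// A hypothetical invocation of the `search` tool matching the `SearchToolInput`
// schema above (sketch only; the globs, regexes, and symbol names are
// illustrative assumptions, not real project content):
//
// {
//   "queries": [
//     {
//       "glob": "src/**/*.rs",
//       "syntax_node": ["struct\\s+User", "fn\\s+full_name"],
//       "content": null
//     },
//     {
//       "glob": "src/db/users.rs",
//       "syntax_node": [],
//       "content": "full_name"
//     }
//   ]
// }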
-"}; diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml index b402274a33530424349081da764a4b6766e419e9..7f3bf3b22dda8f9dbde1923c76855342c6cbac4c 100644 --- a/crates/codestral/Cargo.toml +++ b/crates/codestral/Cargo.toml @@ -10,7 +10,7 @@ path = "src/codestral.rs" [dependencies] anyhow.workspace = true -edit_prediction.workspace = true +edit_prediction_types.workspace = true edit_prediction_context.workspace = true futures.workspace = true gpui.workspace = true diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index 9fbd207a809fb2cb3ac685ea6629a36c8631d1fe..9cf2fab80b78ba06c6a2523013e2f73934f50052 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -1,6 +1,6 @@ use anyhow::{Context as _, Result}; -use edit_prediction::{Direction, EditPrediction, EditPredictionProvider}; use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions}; +use edit_prediction_types::{EditPrediction, EditPredictionDelegate}; use futures::AsyncReadExt; use gpui::{App, Context, Entity, Task}; use http_client::HttpClient; @@ -43,17 +43,17 @@ impl CurrentCompletion { /// Attempts to adjust the edits based on changes made to the buffer since the completion was generated. /// Returns None if the user's edits conflict with the predicted edits. fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option, Arc)>> { - edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits) + edit_prediction_types::interpolate_edits(&self.snapshot, new_snapshot, &self.edits) } } -pub struct CodestralCompletionProvider { +pub struct CodestralEditPredictionDelegate { http_client: Arc, pending_request: Option>>, current_completion: Option, } -impl CodestralCompletionProvider { +impl CodestralEditPredictionDelegate { pub fn new(http_client: Arc) -> Self { Self { http_client, @@ -165,7 +165,7 @@ impl CodestralCompletionProvider { } } -impl EditPredictionProvider for CodestralCompletionProvider { +impl EditPredictionDelegate for CodestralEditPredictionDelegate { fn name() -> &'static str { "codestral" } @@ -174,7 +174,7 @@ impl EditPredictionProvider for CodestralCompletionProvider { "Codestral" } - fn show_completions_in_menu() -> bool { + fn show_predictions_in_menu() -> bool { true } @@ -182,7 +182,7 @@ impl EditPredictionProvider for CodestralCompletionProvider { Self::api_key(cx).is_some() } - fn is_refreshing(&self) -> bool { + fn is_refreshing(&self, _cx: &App) -> bool { self.pending_request.is_some() } @@ -239,7 +239,6 @@ impl EditPredictionProvider for CodestralCompletionProvider { cursor_point, &snapshot, &EXCERPT_OPTIONS, - None, ) .context("Line containing cursor doesn't fit in excerpt max bytes")?; @@ -301,16 +300,6 @@ impl EditPredictionProvider for CodestralCompletionProvider { })); } - fn cycle( - &mut self, - _buffer: Entity, - _cursor_position: Anchor, - _direction: Direction, - _cx: &mut Context, - ) { - // Codestral doesn't support multiple completions, so cycling does nothing - } - fn accept(&mut self, _cx: &mut Context) { log::debug!("Codestral: Completion accepted"); self.pending_request = None; diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index c8467da7954b195c0eef09ce1bed8361d7fa2c7b..79fc21fe33423d7eb887744b4ad84094a022862e 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -50,7 +50,6 @@ scrypt = "0.11" # sea-orm and sea-orm-macros versions must match exactly. 
sea-orm = { version = "=1.1.10", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } sea-orm-macros = "=1.1.10" -semantic_version.workspace = true semver.workspace = true serde.workspace = true serde_json.workspace = true @@ -66,7 +65,7 @@ tokio = { workspace = true, features = ["full"] } toml.workspace = true tower = "0.4" tower-http = { workspace = true, features = ["trace"] } -tracing = "0.1.40" +tracing.workspace = true tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927 util.workspace = true uuid.workspace = true diff --git a/crates/collab/README.md b/crates/collab/README.md index 0ec6d8008ba313357c4ac8e44555ff978d9a1121..902c9841e2d7fb0b52b3143002fd0da29b980802 100644 --- a/crates/collab/README.md +++ b/crates/collab/README.md @@ -63,15 +63,3 @@ Deployment is triggered by pushing to the `collab-staging` (or `collab-productio - `./script/deploy-collab production` You can tell what is currently deployed with `./script/what-is-deployed`. - -# Database Migrations - -To create a new migration: - -```sh -./script/create-migration -``` - -Migrations are run automatically on service start, so run `foreman start` again. The service will crash if the migrations fail. - -When you create a new migration, you also need to update the [SQLite schema](./migrations.sqlite/20221109000000_test_schema.sql) that is used for testing. diff --git a/crates/collab/k8s/migrate.template.yml b/crates/collab/k8s/migrate.template.yml deleted file mode 100644 index c890d7b330c0eca260ca327e5f7db10259f91eaa..0000000000000000000000000000000000000000 --- a/crates/collab/k8s/migrate.template.yml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: batch/v1 -kind: Job -metadata: - namespace: ${ZED_KUBE_NAMESPACE} - name: ${ZED_MIGRATE_JOB_NAME} -spec: - template: - spec: - restartPolicy: Never - containers: - - name: migrator - imagePullPolicy: Always - image: ${ZED_IMAGE_ID} - args: - - migrate - env: - - name: DATABASE_URL - valueFrom: - secretKeyRef: - name: database - key: url diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index a736ddfd1fe3334b1b847e820bd1816cb625ddca..32a2ed2e1331fc7b16f859accd895a7bce055804 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -121,6 +121,8 @@ CREATE TABLE "project_repositories" ( "merge_message" VARCHAR, "branch_summary" VARCHAR, "head_commit_details" VARCHAR, + "remote_upstream_url" VARCHAR, + "remote_origin_url" VARCHAR, PRIMARY KEY (project_id, id) ); diff --git a/crates/collab/migrations/20210527024318_initial_schema.sql b/crates/collab/migrations/20210527024318_initial_schema.sql deleted file mode 100644 index 4b065318484a5c352dde00da9ba55744c4da9adb..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20210527024318_initial_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ -CREATE TABLE IF NOT EXISTS "sessions" ( - "id" VARCHAR NOT NULL PRIMARY KEY, - "expires" TIMESTAMP WITH TIME ZONE NULL, - "session" TEXT NOT NULL -); - -CREATE TABLE IF NOT EXISTS "users" ( - "id" SERIAL PRIMARY KEY, - "github_login" VARCHAR, - "admin" BOOLEAN -); - -CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login"); - -CREATE TABLE IF NOT EXISTS "signups" ( - "id" SERIAL PRIMARY KEY, - "github_login" VARCHAR, - "email_address" VARCHAR, - 
"about" TEXT -); diff --git a/crates/collab/migrations/20210607190313_create_access_tokens.sql b/crates/collab/migrations/20210607190313_create_access_tokens.sql deleted file mode 100644 index 60745a98bae9ac8bc3e2016e598480e74e0b6473..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20210607190313_create_access_tokens.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE IF NOT EXISTS "access_tokens" ( - "id" SERIAL PRIMARY KEY, - "user_id" INTEGER REFERENCES users (id), - "hash" VARCHAR(128) -); - -CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id"); diff --git a/crates/collab/migrations/20210805175147_create_chat_tables.sql b/crates/collab/migrations/20210805175147_create_chat_tables.sql deleted file mode 100644 index 5bba4689d9c21e65d989cf05e2e1eedb0151621d..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20210805175147_create_chat_tables.sql +++ /dev/null @@ -1,46 +0,0 @@ -CREATE TABLE IF NOT EXISTS "orgs" ( - "id" SERIAL PRIMARY KEY, - "name" VARCHAR NOT NULL, - "slug" VARCHAR NOT NULL -); - -CREATE UNIQUE INDEX "index_orgs_slug" ON "orgs" ("slug"); - -CREATE TABLE IF NOT EXISTS "org_memberships" ( - "id" SERIAL PRIMARY KEY, - "org_id" INTEGER REFERENCES orgs (id) NOT NULL, - "user_id" INTEGER REFERENCES users (id) NOT NULL, - "admin" BOOLEAN NOT NULL -); - -CREATE INDEX "index_org_memberships_user_id" ON "org_memberships" ("user_id"); -CREATE UNIQUE INDEX "index_org_memberships_org_id_and_user_id" ON "org_memberships" ("org_id", "user_id"); - -CREATE TABLE IF NOT EXISTS "channels" ( - "id" SERIAL PRIMARY KEY, - "owner_id" INTEGER NOT NULL, - "owner_is_user" BOOLEAN NOT NULL, - "name" VARCHAR NOT NULL -); - -CREATE UNIQUE INDEX "index_channels_owner_and_name" ON "channels" ("owner_is_user", "owner_id", "name"); - -CREATE TABLE IF NOT EXISTS "channel_memberships" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER REFERENCES channels (id) NOT NULL, - "user_id" INTEGER REFERENCES users (id) NOT NULL, - "admin" BOOLEAN NOT NULL -); - -CREATE INDEX "index_channel_memberships_user_id" ON "channel_memberships" ("user_id"); -CREATE UNIQUE INDEX "index_channel_memberships_channel_id_and_user_id" ON "channel_memberships" ("channel_id", "user_id"); - -CREATE TABLE IF NOT EXISTS "channel_messages" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER REFERENCES channels (id) NOT NULL, - "sender_id" INTEGER REFERENCES users (id) NOT NULL, - "body" TEXT NOT NULL, - "sent_at" TIMESTAMP -); - -CREATE INDEX "index_channel_messages_channel_id" ON "channel_messages" ("channel_id"); diff --git a/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql b/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql deleted file mode 100644 index ee4d4aa319f6417e854137332011115570153eae..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20210916123647_add_nonce_to_channel_messages.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE "channel_messages" -ADD "nonce" UUID NOT NULL DEFAULT gen_random_uuid(); - -CREATE UNIQUE INDEX "index_channel_messages_nonce" ON "channel_messages" ("nonce"); diff --git a/crates/collab/migrations/20210920192001_add_interests_to_signups.sql b/crates/collab/migrations/20210920192001_add_interests_to_signups.sql deleted file mode 100644 index 2457abfc757a3d3c6171d9639e2c280981689ead..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20210920192001_add_interests_to_signups.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE "signups" - ADD "wants_releases" 
BOOLEAN, - ADD "wants_updates" BOOLEAN, - ADD "wants_community" BOOLEAN; \ No newline at end of file diff --git a/crates/collab/migrations/20220421165757_drop_signups.sql b/crates/collab/migrations/20220421165757_drop_signups.sql deleted file mode 100644 index d7cd6e204c95e2ea10ee4b4b8183cbe701abb4c0..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220421165757_drop_signups.sql +++ /dev/null @@ -1 +0,0 @@ -DROP TABLE IF EXISTS "signups"; diff --git a/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql b/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql deleted file mode 100644 index 3d6fd3179a236bf8407464f69f1e67469eb31d27..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220505144506_add_trigram_index_to_users.sql +++ /dev/null @@ -1,2 +0,0 @@ -CREATE EXTENSION IF NOT EXISTS pg_trgm; -CREATE INDEX trigram_index_users_on_github_login ON users USING GIN(github_login gin_trgm_ops); diff --git a/crates/collab/migrations/20220506130724_create_contacts.sql b/crates/collab/migrations/20220506130724_create_contacts.sql deleted file mode 100644 index 56beb70fd06ce8a3b7bb00d2f0ada2e465906c69..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220506130724_create_contacts.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE IF NOT EXISTS "contacts" ( - "id" SERIAL PRIMARY KEY, - "user_id_a" INTEGER REFERENCES users (id) NOT NULL, - "user_id_b" INTEGER REFERENCES users (id) NOT NULL, - "a_to_b" BOOLEAN NOT NULL, - "should_notify" BOOLEAN NOT NULL, - "accepted" BOOLEAN NOT NULL -); - -CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b"); -CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b"); diff --git a/crates/collab/migrations/20220518151305_add_invites_to_users.sql b/crates/collab/migrations/20220518151305_add_invites_to_users.sql deleted file mode 100644 index 2ac89b649e8adab60ac93aeba36476d92484dc93..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220518151305_add_invites_to_users.sql +++ /dev/null @@ -1,9 +0,0 @@ -ALTER TABLE users -ADD email_address VARCHAR(255) DEFAULT NULL, -ADD invite_code VARCHAR(64), -ADD invite_count INTEGER NOT NULL DEFAULT 0, -ADD inviter_id INTEGER REFERENCES users (id), -ADD connected_once BOOLEAN NOT NULL DEFAULT false, -ADD created_at TIMESTAMP NOT NULL DEFAULT NOW(); - -CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code"); diff --git a/crates/collab/migrations/20220523232954_allow_user_deletes.sql b/crates/collab/migrations/20220523232954_allow_user_deletes.sql deleted file mode 100644 index ddf3f6f9bd094bfa96f75efe72b64da06e47d0c5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220523232954_allow_user_deletes.sql +++ /dev/null @@ -1,6 +0,0 @@ -ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_a_fkey; -ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_b_fkey; -ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_a_fkey FOREIGN KEY (user_id_a) REFERENCES users(id) ON DELETE CASCADE; -ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_b_fkey FOREIGN KEY (user_id_b) REFERENCES users(id) ON DELETE CASCADE; -ALTER TABLE users DROP CONSTRAINT users_inviter_id_fkey; -ALTER TABLE users ADD CONSTRAINT users_inviter_id_fkey FOREIGN KEY (inviter_id) REFERENCES users(id) ON DELETE SET NULL; diff --git a/crates/collab/migrations/20220620211403_create_projects.sql b/crates/collab/migrations/20220620211403_create_projects.sql deleted file 
mode 100644 index d813c9f7a1811e50227c312c66df0fd679c35166..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220620211403_create_projects.sql +++ /dev/null @@ -1,24 +0,0 @@ -CREATE TABLE IF NOT EXISTS "projects" ( - "id" SERIAL PRIMARY KEY, - "host_user_id" INTEGER REFERENCES users (id) NOT NULL, - "unregistered" BOOLEAN NOT NULL DEFAULT false -); - -CREATE TABLE IF NOT EXISTS "worktree_extensions" ( - "id" SERIAL PRIMARY KEY, - "project_id" INTEGER REFERENCES projects (id) NOT NULL, - "worktree_id" INTEGER NOT NULL, - "extension" VARCHAR(255), - "count" INTEGER NOT NULL -); - -CREATE TABLE IF NOT EXISTS "project_activity_periods" ( - "id" SERIAL PRIMARY KEY, - "duration_millis" INTEGER NOT NULL, - "ended_at" TIMESTAMP NOT NULL, - "user_id" INTEGER REFERENCES users (id) NOT NULL, - "project_id" INTEGER REFERENCES projects (id) NOT NULL -); - -CREATE INDEX "index_project_activity_periods_on_ended_at" ON "project_activity_periods" ("ended_at"); -CREATE UNIQUE INDEX "index_worktree_extensions_on_project_id_and_worktree_id_and_extension" ON "worktree_extensions" ("project_id", "worktree_id", "extension"); \ No newline at end of file diff --git a/crates/collab/migrations/20220913211150_create_signups.sql b/crates/collab/migrations/20220913211150_create_signups.sql deleted file mode 100644 index 19559b747c33b0fc146572201ba3dd1d1c37bf47..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220913211150_create_signups.sql +++ /dev/null @@ -1,27 +0,0 @@ -CREATE TABLE IF NOT EXISTS "signups" ( - "id" SERIAL PRIMARY KEY, - "email_address" VARCHAR NOT NULL, - "email_confirmation_code" VARCHAR(64) NOT NULL, - "email_confirmation_sent" BOOLEAN NOT NULL, - "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - "device_id" VARCHAR, - "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE, - "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL, - - "platform_mac" BOOLEAN NOT NULL, - "platform_linux" BOOLEAN NOT NULL, - "platform_windows" BOOLEAN NOT NULL, - "platform_unknown" BOOLEAN NOT NULL, - - "editor_features" VARCHAR[], - "programming_languages" VARCHAR[] -); - -CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address"); -CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent"); - -ALTER TABLE "users" - ADD "github_user_id" INTEGER; - -CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); -CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); diff --git a/crates/collab/migrations/20220929182110_add_metrics_id.sql b/crates/collab/migrations/20220929182110_add_metrics_id.sql deleted file mode 100644 index 665d6323bf13b5553859ae6763392770dc33bebb..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20220929182110_add_metrics_id.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "users" - ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid(); diff --git a/crates/collab/migrations/20221111092550_reconnection_support.sql b/crates/collab/migrations/20221111092550_reconnection_support.sql deleted file mode 100644 index 3289f6bbddb63e08acdc5e89a900193359423b2c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221111092550_reconnection_support.sql +++ /dev/null @@ -1,90 +0,0 @@ -CREATE TABLE IF NOT EXISTS "rooms" ( - "id" SERIAL PRIMARY KEY, - "live_kit_room" VARCHAR NOT NULL -); - -ALTER TABLE "projects" - ADD "room_id" INTEGER REFERENCES rooms (id), - ADD "host_connection_id" INTEGER, - ADD 
"host_connection_epoch" UUID; -CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch"); - -CREATE TABLE "worktrees" ( - "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, - "id" INT8 NOT NULL, - "root_name" VARCHAR NOT NULL, - "abs_path" VARCHAR NOT NULL, - "visible" BOOL NOT NULL, - "scan_id" INT8 NOT NULL, - "is_complete" BOOL NOT NULL, - PRIMARY KEY(project_id, id) -); -CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id"); - -CREATE TABLE "worktree_entries" ( - "project_id" INTEGER NOT NULL, - "worktree_id" INT8 NOT NULL, - "id" INT8 NOT NULL, - "is_dir" BOOL NOT NULL, - "path" VARCHAR NOT NULL, - "inode" INT8 NOT NULL, - "mtime_seconds" INT8 NOT NULL, - "mtime_nanos" INTEGER NOT NULL, - "is_symlink" BOOL NOT NULL, - "is_ignored" BOOL NOT NULL, - PRIMARY KEY(project_id, worktree_id, id), - FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE -); -CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id"); -CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id"); - -CREATE TABLE "worktree_diagnostic_summaries" ( - "project_id" INTEGER NOT NULL, - "worktree_id" INT8 NOT NULL, - "path" VARCHAR NOT NULL, - "language_server_id" INT8 NOT NULL, - "error_count" INTEGER NOT NULL, - "warning_count" INTEGER NOT NULL, - PRIMARY KEY(project_id, worktree_id, path), - FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE -); -CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id"); -CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id"); - -CREATE TABLE "language_servers" ( - "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, - "id" INT8 NOT NULL, - "name" VARCHAR NOT NULL, - PRIMARY KEY(project_id, id) -); -CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id"); - -CREATE TABLE "project_collaborators" ( - "id" SERIAL PRIMARY KEY, - "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, - "connection_id" INTEGER NOT NULL, - "connection_epoch" UUID NOT NULL, - "user_id" INTEGER NOT NULL, - "replica_id" INTEGER NOT NULL, - "is_host" BOOLEAN NOT NULL -); -CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id"); -CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id"); -CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch"); - -CREATE TABLE "room_participants" ( - "id" SERIAL PRIMARY KEY, - "room_id" INTEGER NOT NULL REFERENCES rooms (id), - "user_id" INTEGER NOT NULL REFERENCES users (id), - "answering_connection_id" INTEGER, - "answering_connection_epoch" UUID, - "location_kind" INTEGER, - "location_project_id" INTEGER, - "initial_project_id" INTEGER, - "calling_user_id" INTEGER NOT NULL REFERENCES users (id), - "calling_connection_id" INTEGER NOT NULL, - "calling_connection_epoch" UUID NOT NULL -); -CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id"); -CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch"); -CREATE INDEX 
"index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch"); diff --git a/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql b/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql deleted file mode 100644 index b154396df1259aa73b5e1a17c9db27d04510e062..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221125192125_add_added_to_mailing_list_to_signups.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "signups" - ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql b/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql deleted file mode 100644 index ed0cf972bc97f517fb878806b0929e8122b2b8a2..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221207165001_add_connection_lost_to_room_participants.sql +++ /dev/null @@ -1,7 +0,0 @@ -ALTER TABLE "room_participants" - ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE; - -CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id"); -CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch"); -CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id"); -CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch"); diff --git a/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql b/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql deleted file mode 100644 index f40ca81906f41e8c30530ce349f892bb69111657..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221213125710_index_room_participants_on_room_id.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id"); diff --git a/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql b/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql deleted file mode 100644 index 5e02f76ce25d59d799d5e5d9719e4e038d1bac02..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221214144346_change_epoch_from_uuid_to_integer.sql +++ /dev/null @@ -1,30 +0,0 @@ -CREATE TABLE servers ( - id SERIAL PRIMARY KEY, - environment VARCHAR NOT NULL -); - -DROP TABLE worktree_extensions; -DROP TABLE project_activity_periods; -DELETE from projects; -ALTER TABLE projects - DROP COLUMN host_connection_epoch, - ADD COLUMN host_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE; -CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); -CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id"); - -DELETE FROM project_collaborators; -ALTER TABLE project_collaborators - DROP COLUMN connection_epoch, - ADD COLUMN connection_server_id INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE; -CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id"); -CREATE UNIQUE INDEX 
"index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id"); - -DELETE FROM room_participants; -ALTER TABLE room_participants - DROP COLUMN answering_connection_epoch, - DROP COLUMN calling_connection_epoch, - ADD COLUMN answering_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE, - ADD COLUMN calling_connection_server_id INTEGER REFERENCES servers (id) ON DELETE SET NULL; -CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id"); -CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id"); -CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id"); diff --git a/crates/collab/migrations/20221219181850_project_reconnection_support.sql b/crates/collab/migrations/20221219181850_project_reconnection_support.sql deleted file mode 100644 index 6efef5571c5855beb0c4b59d5f52ff92b323bb20..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20221219181850_project_reconnection_support.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE "worktree_entries" - ADD COLUMN "scan_id" INT8, - ADD COLUMN "is_deleted" BOOL; diff --git a/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql b/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql deleted file mode 100644 index 1894d888b92a89508981abe5de7f5fc3e710184f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230103200902_replace_is_completed_with_completed_scan_id.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE worktrees - ALTER COLUMN is_complete SET DEFAULT FALSE, - ADD COLUMN completed_scan_id INT8; diff --git a/crates/collab/migrations/20230202155735_followers.sql b/crates/collab/migrations/20230202155735_followers.sql deleted file mode 100644 index c82d6ba3bdaa4f2b2a60771bca7401c47678f247..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230202155735_followers.sql +++ /dev/null @@ -1,15 +0,0 @@ -CREATE TABLE IF NOT EXISTS "followers" ( - "id" SERIAL PRIMARY KEY, - "room_id" INTEGER NOT NULL REFERENCES rooms (id) ON DELETE CASCADE, - "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, - "leader_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, - "leader_connection_id" INTEGER NOT NULL, - "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, - "follower_connection_id" INTEGER NOT NULL -); - -CREATE UNIQUE INDEX - "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id" -ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id"); - -CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id"); diff --git a/crates/collab/migrations/20230508211523_add-repository-entries.sql b/crates/collab/migrations/20230508211523_add-repository-entries.sql deleted file mode 100644 index 1e593479394c8434f56f3519b41ce2fa2a9fc2a3..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230508211523_add-repository-entries.sql +++ /dev/null @@ -1,13 +0,0 @@ -CREATE 
TABLE "worktree_repositories" ( - "project_id" INTEGER NOT NULL, - "worktree_id" INT8 NOT NULL, - "work_directory_id" INT8 NOT NULL, - "scan_id" INT8 NOT NULL, - "branch" VARCHAR, - "is_deleted" BOOL NOT NULL, - PRIMARY KEY(project_id, worktree_id, work_directory_id), - FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, - FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE -); -CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id"); -CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id"); diff --git a/crates/collab/migrations/20230511004019_add_repository_statuses.sql b/crates/collab/migrations/20230511004019_add_repository_statuses.sql deleted file mode 100644 index 862561c6866d361ca628924a15b925d97d0c39cb..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230511004019_add_repository_statuses.sql +++ /dev/null @@ -1,15 +0,0 @@ -CREATE TABLE "worktree_repository_statuses" ( - "project_id" INTEGER NOT NULL, - "worktree_id" INT8 NOT NULL, - "work_directory_id" INT8 NOT NULL, - "repo_path" VARCHAR NOT NULL, - "status" INT8 NOT NULL, - "scan_id" INT8 NOT NULL, - "is_deleted" BOOL NOT NULL, - PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path), - FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, - FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE -); -CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id"); -CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id"); -CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id"); diff --git a/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql b/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql deleted file mode 100644 index 973a40af0f21908e5dbe0d5a30373629f24b7f1e..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230529164700_add_worktree_settings_files.sql +++ /dev/null @@ -1,10 +0,0 @@ -CREATE TABLE "worktree_settings_files" ( - "project_id" INTEGER NOT NULL, - "worktree_id" INT8 NOT NULL, - "path" VARCHAR NOT NULL, - "content" TEXT NOT NULL, - PRIMARY KEY(project_id, worktree_id, path), - FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE -); -CREATE INDEX "index_settings_files_on_project_id" ON "worktree_settings_files" ("project_id"); -CREATE INDEX "index_settings_files_on_project_id_and_wt_id" ON "worktree_settings_files" ("project_id", "worktree_id"); diff --git a/crates/collab/migrations/20230605191135_remove_repository_statuses.sql b/crates/collab/migrations/20230605191135_remove_repository_statuses.sql deleted file mode 100644 index 3e5f907c442d3604ebff5f2fbf60e9c34caa25d9..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230605191135_remove_repository_statuses.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "worktree_entries" -ADD "git_status" INT8; diff --git a/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql 
b/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql deleted file mode 100644 index e4348af0cc5c12a43fac3adecc106eb16a6de005..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230616134535_add_is_external_to_worktree_entries.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "worktree_entries" -ADD "is_external" BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20230727150500_add_channels.sql b/crates/collab/migrations/20230727150500_add_channels.sql deleted file mode 100644 index df981838bf72d7ef7392ed6f4e302ffdc57631db..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230727150500_add_channels.sql +++ /dev/null @@ -1,30 +0,0 @@ -DROP TABLE "channel_messages"; -DROP TABLE "channel_memberships"; -DROP TABLE "org_memberships"; -DROP TABLE "orgs"; -DROP TABLE "channels"; - -CREATE TABLE "channels" ( - "id" SERIAL PRIMARY KEY, - "name" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL DEFAULT now() -); - -CREATE TABLE "channel_paths" ( - "id_path" VARCHAR NOT NULL PRIMARY KEY, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE -); -CREATE INDEX "index_channel_paths_on_channel_id" ON "channel_paths" ("channel_id"); - -CREATE TABLE "channel_members" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "admin" BOOLEAN NOT NULL DEFAULT false, - "accepted" BOOLEAN NOT NULL DEFAULT false, - "updated_at" TIMESTAMP NOT NULL DEFAULT now() -); - -CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id"); - -ALTER TABLE rooms ADD COLUMN "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE; diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql deleted file mode 100644 index 5e6e7ce3393a628c86cbcdabf2349ebfa6667bd6..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230819154600_add_channel_buffers.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE TABLE "buffers" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "epoch" INTEGER NOT NULL DEFAULT 0 -); - -CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id"); - -CREATE TABLE "buffer_operations" ( - "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, - "epoch" INTEGER NOT NULL, - "replica_id" INTEGER NOT NULL, - "lamport_timestamp" INTEGER NOT NULL, - "value" BYTEA NOT NULL, - PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) -); - -CREATE TABLE "buffer_snapshots" ( - "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, - "epoch" INTEGER NOT NULL, - "text" TEXT NOT NULL, - "operation_serialization_version" INTEGER NOT NULL, - PRIMARY KEY(buffer_id, epoch) -); - -CREATE TABLE "channel_buffer_collaborators" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "connection_id" INTEGER NOT NULL, - "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, - "connection_lost" BOOLEAN NOT NULL DEFAULT FALSE, - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "replica_id" INTEGER NOT NULL -); - -CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); -CREATE UNIQUE INDEX 
"index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); -CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); -CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); -CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/migrations/20230825190322_add_server_feature_flags.sql b/crates/collab/migrations/20230825190322_add_server_feature_flags.sql deleted file mode 100644 index fffde54a20e4869ccbef2093de4e7fe5044132e2..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230825190322_add_server_feature_flags.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE TABLE "feature_flags" ( - "id" SERIAL PRIMARY KEY, - "flag" VARCHAR(255) NOT NULL UNIQUE -); - -CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id"); - -CREATE TABLE "user_features" ( - "user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, - "feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE, - PRIMARY KEY (user_id, feature_id) -); - -CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id"); -CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id"); -CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id"); diff --git a/crates/collab/migrations/20230907114200_add_channel_messages.sql b/crates/collab/migrations/20230907114200_add_channel_messages.sql deleted file mode 100644 index abe7753ca69fb45a1f0a56b732963d8dc5605e31..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230907114200_add_channel_messages.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE IF NOT EXISTS "channel_messages" ( - "id" SERIAL PRIMARY KEY, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "sender_id" INTEGER NOT NULL REFERENCES users (id), - "body" TEXT NOT NULL, - "sent_at" TIMESTAMP, - "nonce" UUID NOT NULL -); -CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id"); -CREATE UNIQUE INDEX "index_channel_messages_on_nonce" ON "channel_messages" ("nonce"); - -CREATE TABLE IF NOT EXISTS "channel_chat_participants" ( - "id" SERIAL PRIMARY KEY, - "user_id" INTEGER NOT NULL REFERENCES users (id), - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "connection_id" INTEGER NOT NULL, - "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE -); -CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id"); diff --git a/crates/collab/migrations/20230925210437_add_channel_changes.sql b/crates/collab/migrations/20230925210437_add_channel_changes.sql deleted file mode 100644 index 250a9ac731b59489e85cf34a6754307bfac543ee..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230925210437_add_channel_changes.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE IF NOT EXISTS "observed_buffer_edits" ( - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, - "epoch" INTEGER NOT NULL, - "lamport_timestamp" INTEGER NOT NULL, - "replica_id" INTEGER NOT NULL, - 
PRIMARY KEY (user_id, buffer_id) -); - -CREATE UNIQUE INDEX "index_observed_buffer_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id"); - -CREATE TABLE IF NOT EXISTS "observed_channel_messages" ( - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "channel_message_id" INTEGER NOT NULL, - PRIMARY KEY (user_id, channel_id) -); - -CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id"); diff --git a/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql b/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql deleted file mode 100644 index 1493119e2a97ac42f5d69ebc82ac3d3d0dc4dd63..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20230926102500_add_participant_index_to_room_participants.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE room_participants ADD COLUMN participant_index INTEGER; diff --git a/crates/collab/migrations/20231004130100_create_notifications.sql b/crates/collab/migrations/20231004130100_create_notifications.sql deleted file mode 100644 index 93c282c631f3d5545593b7c71f013d8457cd088a..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231004130100_create_notifications.sql +++ /dev/null @@ -1,22 +0,0 @@ -CREATE TABLE "notification_kinds" ( - "id" SERIAL PRIMARY KEY, - "name" VARCHAR NOT NULL -); - -CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name"); - -CREATE TABLE notifications ( - "id" SERIAL PRIMARY KEY, - "created_at" TIMESTAMP NOT NULL DEFAULT now(), - "recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "kind" INTEGER NOT NULL REFERENCES notification_kinds (id), - "entity_id" INTEGER, - "content" TEXT, - "is_read" BOOLEAN NOT NULL DEFAULT FALSE, - "response" BOOLEAN -); - -CREATE INDEX - "index_notifications_on_recipient_id_is_read_kind_entity_id" - ON "notifications" - ("recipient_id", "is_read", "kind", "entity_id"); diff --git a/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql b/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql deleted file mode 100644 index 8f3a704adde0c385b26bd553d273eff322a17702..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231009181554_add_release_channel_to_rooms.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE rooms ADD COLUMN enviroment TEXT; diff --git a/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql b/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql deleted file mode 100644 index 21ec4cfbb75a574ad3704179a0ae14c8050149d1..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231010114600_add_unique_index_on_rooms_channel_id.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id"); diff --git a/crates/collab/migrations/20231011214412_add_guest_role.sql b/crates/collab/migrations/20231011214412_add_guest_role.sql deleted file mode 100644 index 17135471583a03bd6b39e82b3644b683cfc96d57..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231011214412_add_guest_role.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE channel_members ADD COLUMN role TEXT; -UPDATE channel_members SET role = CASE WHEN admin THEN 'admin' ELSE 'member' END; - -ALTER TABLE channels ADD COLUMN 
visibility TEXT NOT NULL DEFAULT 'members'; diff --git a/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql b/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql deleted file mode 100644 index be535ff7fa6e707182b8698647ecc92d9f976183..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231017185833_projects_room_id_fkey_on_delete_cascade.sql +++ /dev/null @@ -1,8 +0,0 @@ --- Add migration script here - -ALTER TABLE projects - DROP CONSTRAINT projects_room_id_fkey, - ADD CONSTRAINT projects_room_id_fkey - FOREIGN KEY (room_id) - REFERENCES rooms (id) - ON DELETE CASCADE; diff --git a/crates/collab/migrations/20231018102700_create_mentions.sql b/crates/collab/migrations/20231018102700_create_mentions.sql deleted file mode 100644 index 221a1748cfe16276deb4fc3dd2329983340307e7..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231018102700_create_mentions.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE "channel_message_mentions" ( - "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE, - "start_offset" INTEGER NOT NULL, - "end_offset" INTEGER NOT NULL, - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - PRIMARY KEY(message_id, start_offset) -); - --- We use 'on conflict update' with this index, so it should be per-user. -CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce"); -DROP INDEX "index_channel_messages_on_nonce"; diff --git a/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql b/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql deleted file mode 100644 index d9fc6c872267b89fe30c958b4b01bb7fdf1fc448..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20231024085546_move_channel_paths_to_channels_table.sql +++ /dev/null @@ -1,12 +0,0 @@ -ALTER TABLE channels ADD COLUMN parent_path TEXT; - -UPDATE channels -SET parent_path = substr( - channel_paths.id_path, - 2, - length(channel_paths.id_path) - length('/' || channel_paths.channel_id::text || '/') -) -FROM channel_paths -WHERE channel_paths.channel_id = channels.id; - -CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path"); diff --git a/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql b/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql deleted file mode 100644 index 2748e00ebaa18ec375111c648a7accafe90c5dbb..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240103025509_add_role_to_room_participants.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE room_participants ADD COLUMN role TEXT; diff --git a/crates/collab/migrations/20240111085546_fix_column_name.sql b/crates/collab/migrations/20240111085546_fix_column_name.sql deleted file mode 100644 index 3f32ee35c59107e12fda98159911dbba6e13434a..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240111085546_fix_column_name.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE rooms ADD COLUMN environment TEXT; diff --git a/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql b/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql deleted file mode 100644 index 8c79640cd88bfad58e5f9eafda90ae2d80e4e834..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240117150300_add_impersonator_to_access_tokens.sql +++ /dev/null @@ -1 +0,0 @@ 
-ALTER TABLE access_tokens ADD COLUMN impersonated_user_id integer; diff --git a/crates/collab/migrations/20240122174606_add_contributors.sql b/crates/collab/migrations/20240122174606_add_contributors.sql deleted file mode 100644 index 16bec82d4f2bd0a1b3f4221366cd822ebcd70bb1..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240122174606_add_contributors.sql +++ /dev/null @@ -1,5 +0,0 @@ -CREATE TABLE contributors ( - user_id INTEGER REFERENCES users(id), - signed_at TIMESTAMP NOT NULL DEFAULT NOW(), - PRIMARY KEY (user_id) -); diff --git a/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql b/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql deleted file mode 100644 index a9248d294a2178b73986ab20cd06383d0397626b..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240122224506_add_requires_zed_cla_column_to_channels.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE "channels" ADD COLUMN "requires_zed_cla" BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20240129193601_fix_parent_path_index.sql b/crates/collab/migrations/20240129193601_fix_parent_path_index.sql deleted file mode 100644 index 73dd6e37cdf82f2f5d77e8b3cd14a2fbe43f2320..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240129193601_fix_parent_path_index.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Add migration script here - -DROP INDEX index_channels_on_parent_path; -CREATE INDEX index_channels_on_parent_path ON channels (parent_path text_pattern_ops); diff --git a/crates/collab/migrations/20240203113741_add_reply_to_message.sql b/crates/collab/migrations/20240203113741_add_reply_to_message.sql deleted file mode 100644 index 6f40b62822bb4936f0f90e3be65f640e323d09d0..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240203113741_add_reply_to_message.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE channel_messages ADD reply_to_message_id INTEGER DEFAULT NULL diff --git a/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql b/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql deleted file mode 100644 index 09463c6e784d4e13df5376ad3cd53c8cb9ebcf45..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240207041417_add_in_call_column_to_room_participants.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Add migration script here - -ALTER TABLE room_participants ADD COLUMN in_call BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql b/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql deleted file mode 100644 index dc4897af48afd3fa9ebc403b6f8103b933993ac4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240213200201_remove_unused_room_columns.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Add migration script here -ALTER TABLE rooms DROP COLUMN enviroment; -ALTER TABLE rooms DROP COLUMN environment; -ALTER TABLE room_participants DROP COLUMN in_call; diff --git a/crates/collab/migrations/20240214102900_add_extensions.sql b/crates/collab/migrations/20240214102900_add_extensions.sql deleted file mode 100644 index b32094036d6a8993a8dbc6dc2407dec0e53aea47..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240214102900_add_extensions.sql +++ /dev/null @@ -1,22 +0,0 @@ -CREATE TABLE IF NOT EXISTS extensions ( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL, - external_id TEXT NOT NULL, 
- latest_version TEXT NOT NULL, - total_download_count BIGINT NOT NULL DEFAULT 0 -); - -CREATE TABLE IF NOT EXISTS extension_versions ( - extension_id INTEGER REFERENCES extensions(id), - version TEXT NOT NULL, - published_at TIMESTAMP NOT NULL DEFAULT now(), - authors TEXT NOT NULL, - repository TEXT NOT NULL, - description TEXT NOT NULL, - download_count BIGINT NOT NULL DEFAULT 0, - PRIMARY KEY(extension_id, version) -); - -CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id"); -CREATE INDEX "trigram_index_extensions_name" ON "extensions" USING GIN(name gin_trgm_ops); -CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count"); diff --git a/crates/collab/migrations/20240220234826_add_rate_buckets.sql b/crates/collab/migrations/20240220234826_add_rate_buckets.sql deleted file mode 100644 index 864a4373034fc53ea357f0b4d46b1b127a9f8db5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240220234826_add_rate_buckets.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE IF NOT EXISTS rate_buckets ( - user_id INT NOT NULL, - rate_limit_name VARCHAR(255) NOT NULL, - token_count INT NOT NULL, - last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL, - PRIMARY KEY (user_id, rate_limit_name), - CONSTRAINT fk_user - FOREIGN KEY (user_id) REFERENCES users(id) -); - -CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); diff --git a/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql b/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql deleted file mode 100644 index 1d07b07de7bf382fe0610ce170a639ab769567d4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240221151017_add_edited_at_field_to_channel_message.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE channel_messages ADD edited_at TIMESTAMP DEFAULT NULL; diff --git a/crates/collab/migrations/20240226163408_hosted_projects.sql b/crates/collab/migrations/20240226163408_hosted_projects.sql deleted file mode 100644 index c6ade7161cce7eafc00564f3e1b934be93b08186..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240226163408_hosted_projects.sql +++ /dev/null @@ -1,11 +0,0 @@ --- Add migration script here - -CREATE TABLE hosted_projects ( - id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - channel_id INT NOT NULL REFERENCES channels(id), - name TEXT NOT NULL, - visibility TEXT NOT NULL, - deleted_at TIMESTAMP NULL -); -CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id); -CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL); diff --git a/crates/collab/migrations/20240226164505_unique_channel_names.sql b/crates/collab/migrations/20240226164505_unique_channel_names.sql deleted file mode 100644 index c9d9f0a1cbf55249ee1fc80c5338bdc45381b46f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240226164505_unique_channel_names.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Add migration script here - -CREATE UNIQUE INDEX uix_channels_parent_path_name ON channels(parent_path, name) WHERE (parent_path IS NOT NULL AND parent_path != ''); diff --git a/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql b/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql deleted file mode 100644 index 69905d12f6d0e9945f291dc7b86f446bdaab08ac..0000000000000000000000000000000000000000 --- 
a/crates/collab/migrations/20240227215556_hosted_projects_in_projects.sql +++ /dev/null @@ -1,3 +0,0 @@ --- Add migration script here -ALTER TABLE projects ALTER COLUMN host_user_id DROP NOT NULL; -ALTER TABLE projects ADD COLUMN hosted_project_id INTEGER REFERENCES hosted_projects(id) UNIQUE NULL; diff --git a/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql b/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql deleted file mode 100644 index a332a20d52c4564ac90989a60fc0dd850d86034c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240307163119_denormalize_buffer_ops.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Add migration script here - -ALTER TABLE buffers ADD COLUMN latest_operation_epoch INTEGER; -ALTER TABLE buffers ADD COLUMN latest_operation_lamport_timestamp INTEGER; -ALTER TABLE buffers ADD COLUMN latest_operation_replica_id INTEGER; - -WITH ops AS ( - SELECT DISTINCT ON (buffer_id) buffer_id, epoch, lamport_timestamp, replica_id - FROM buffer_operations - ORDER BY buffer_id, epoch DESC, lamport_timestamp DESC, replica_id DESC -) -UPDATE buffers -SET latest_operation_epoch = ops.epoch, - latest_operation_lamport_timestamp = ops.lamport_timestamp, - latest_operation_replica_id = ops.replica_id -FROM ops -WHERE buffers.id = ops.buffer_id; diff --git a/crates/collab/migrations/20240315182903_non_null_channel_role.sql b/crates/collab/migrations/20240315182903_non_null_channel_role.sql deleted file mode 100644 index 2d359f8058f0591e47bc733f1c8b4c60fe3a56cd..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240315182903_non_null_channel_role.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Add migration script here - -ALTER TABLE channel_members ALTER role SET NOT NULL; -ALTER TABLE channel_members DROP COLUMN admin; diff --git a/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql b/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql deleted file mode 100644 index 5703578b008817bacaa6e1956b28cdac7919e5fa..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240315183903_channel_parent_path_not_null.sql +++ /dev/null @@ -1,2 +0,0 @@ --- Add migration script here -ALTER TABLE channels ALTER parent_path SET NOT NULL; diff --git a/crates/collab/migrations/20240320124800_add_extension_schema_version.sql b/crates/collab/migrations/20240320124800_add_extension_schema_version.sql deleted file mode 100644 index 75fd0f40e4f9013f41e57cd094b3e93bf5f46101..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240320124800_add_extension_schema_version.sql +++ /dev/null @@ -1,2 +0,0 @@ --- Add migration script here -ALTER TABLE extension_versions ADD COLUMN schema_version INTEGER NOT NULL DEFAULT 0; diff --git a/crates/collab/migrations/20240321162658_add_devservers.sql b/crates/collab/migrations/20240321162658_add_devservers.sql deleted file mode 100644 index cb1ff4df405f9f1deb9d2c9e86f4234b9ba6d2b2..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240321162658_add_devservers.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE dev_servers ( - id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - channel_id INT NOT NULL REFERENCES channels(id), - name TEXT NOT NULL, - hashed_token TEXT NOT NULL -); -CREATE INDEX idx_dev_servers_on_channel_id ON dev_servers (channel_id); diff --git a/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql b/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql 
deleted file mode 100644 index 3b95323d262f1666dfbe8696a780dd5e8b674c99..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240335123500_add_extension_wasm_api_version.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE extension_versions ADD COLUMN wasm_api_version TEXT; diff --git a/crates/collab/migrations/20240402155003_add_dev_server_projects.sql b/crates/collab/migrations/20240402155003_add_dev_server_projects.sql deleted file mode 100644 index 003c43f4e27f4f7fb4cbfbb84b4be11d3a42ecc0..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240402155003_add_dev_server_projects.sql +++ /dev/null @@ -1,9 +0,0 @@ -CREATE TABLE remote_projects ( - id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - channel_id INT NOT NULL REFERENCES channels(id), - dev_server_id INT NOT NULL REFERENCES dev_servers(id), - name TEXT NOT NULL, - path TEXT NOT NULL -); - -ALTER TABLE projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id); diff --git a/crates/collab/migrations/20240409082755_create_embeddings.sql b/crates/collab/migrations/20240409082755_create_embeddings.sql deleted file mode 100644 index ae4b4bcb61c049ea75726fb92eaf2c795891370e..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240409082755_create_embeddings.sql +++ /dev/null @@ -1,9 +0,0 @@ -CREATE TABLE IF NOT EXISTS "embeddings" ( - "model" TEXT, - "digest" BYTEA, - "dimensions" FLOAT4[1536], - "retrieved_at" TIMESTAMP NOT NULL DEFAULT now(), - PRIMARY KEY ("model", "digest") -); - -CREATE INDEX IF NOT EXISTS "idx_retrieved_at_on_embeddings" ON "embeddings" ("retrieved_at"); diff --git a/crates/collab/migrations/20240412165156_dev_servers_per_user.sql b/crates/collab/migrations/20240412165156_dev_servers_per_user.sql deleted file mode 100644 index 7ef9e2fde0530aeca9598a0a1030eabdba3036f0..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240412165156_dev_servers_per_user.sql +++ /dev/null @@ -1,7 +0,0 @@ -DELETE FROM remote_projects; -DELETE FROM dev_servers; - -ALTER TABLE dev_servers DROP COLUMN channel_id; -ALTER TABLE dev_servers ADD COLUMN user_id INT NOT NULL REFERENCES users(id); - -ALTER TABLE remote_projects DROP COLUMN channel_id; diff --git a/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql b/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql deleted file mode 100644 index 923b948ceeb3ebe27502cb773a5232bc6cc39fc4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240417192746_unique_remote_projects_by_paths.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE remote_projects DROP COLUMN name; -ALTER TABLE remote_projects -ADD CONSTRAINT unique_path_constraint UNIQUE(dev_server_id, path); diff --git a/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql b/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql deleted file mode 100644 index 0d8e9de5e6ada47c617401b53dcca5d18b643aa6..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240502150229_rename_to_dev_server_projects.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE dev_server_projects ( - id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY (START WITH 100), - dev_server_id INT NOT NULL REFERENCES dev_servers(id) ON DELETE CASCADE, - path TEXT NOT NULL -); -INSERT INTO dev_server_projects OVERRIDING SYSTEM VALUE SELECT * FROM remote_projects; - -ALTER TABLE dev_server_projects ADD CONSTRAINT uix_dev_server_projects_dev_server_id_path 
UNIQUE(dev_server_id, path); - -ALTER TABLE projects ADD COLUMN dev_server_project_id INTEGER REFERENCES dev_server_projects(id); -UPDATE projects SET dev_server_project_id = remote_project_id; diff --git a/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql b/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql deleted file mode 100644 index 01ace43fab08bfeee9b7e665ccd28ebc50f27134..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240502180204_remove_old_remote_projects.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE projects DROP COLUMN remote_project_id; -DROP TABLE remote_projects; diff --git a/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql b/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql deleted file mode 100644 index 5085ca271bd130ab25d173391daae8542930ae27..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240514164510_store_ssh_connect_string.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE dev_servers ADD COLUMN ssh_connection_string TEXT; diff --git a/crates/collab/migrations/20240715230940_add_worktrees_to_dev_server_projects.sql b/crates/collab/migrations/20240715230940_add_worktrees_to_dev_server_projects.sql deleted file mode 100644 index 675df4885bb531722cf80a76bf93eac58add5b8c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240715230940_add_worktrees_to_dev_server_projects.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE dev_server_projects ADD COLUMN paths JSONB NULL; -UPDATE dev_server_projects SET paths = to_json(ARRAY[path]); -ALTER TABLE dev_server_projects ALTER COLUMN paths SET NOT NULL; -ALTER TABLE dev_server_projects ALTER COLUMN path DROP NOT NULL; diff --git a/crates/collab/migrations/20240729170526_add_billing_subscription.sql b/crates/collab/migrations/20240729170526_add_billing_subscription.sql deleted file mode 100644 index acec4b3ddb43c3a59e221daabad4b3161c867d09..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240729170526_add_billing_subscription.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE IF NOT EXISTS billing_subscriptions ( - id SERIAL PRIMARY KEY, - created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now(), - user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, - stripe_customer_id TEXT NOT NULL, - stripe_subscription_id TEXT NOT NULL, - stripe_subscription_status TEXT NOT NULL -); - -CREATE INDEX "ix_billing_subscriptions_on_user_id" ON billing_subscriptions (user_id); -CREATE INDEX "ix_billing_subscriptions_on_stripe_customer_id" ON billing_subscriptions (stripe_customer_id); -CREATE UNIQUE INDEX "uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id); diff --git a/crates/collab/migrations/20240730014107_add_billing_customer.sql b/crates/collab/migrations/20240730014107_add_billing_customer.sql deleted file mode 100644 index 7f7d4a0f85608ba07595b81d90dd617a8acd4e0c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240730014107_add_billing_customer.sql +++ /dev/null @@ -1,18 +0,0 @@ -CREATE TABLE IF NOT EXISTS billing_customers ( - id SERIAL PRIMARY KEY, - created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now(), - user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, - stripe_customer_id TEXT NOT NULL -); - -CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id); -CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON 
billing_customers (stripe_customer_id); - --- Make `billing_subscriptions` reference `billing_customers` instead of having its --- own `user_id` and `stripe_customer_id`. -DROP INDEX IF EXISTS "ix_billing_subscriptions_on_user_id"; -DROP INDEX IF EXISTS "ix_billing_subscriptions_on_stripe_customer_id"; -ALTER TABLE billing_subscriptions DROP COLUMN user_id; -ALTER TABLE billing_subscriptions DROP COLUMN stripe_customer_id; -ALTER TABLE billing_subscriptions ADD COLUMN billing_customer_id INTEGER NOT NULL REFERENCES billing_customers (id) ON DELETE CASCADE; -CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id); diff --git a/crates/collab/migrations/20240730122654_add_last_stripe_event_id.sql b/crates/collab/migrations/20240730122654_add_last_stripe_event_id.sql deleted file mode 100644 index 477eadd742e3a128356fdeb75d5f35c1e8f77795..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240730122654_add_last_stripe_event_id.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE billing_customers ADD COLUMN last_stripe_event_id TEXT; -ALTER TABLE billing_subscriptions ADD COLUMN last_stripe_event_id TEXT; diff --git a/crates/collab/migrations/20240730182554_add_processed_stripe_events.sql b/crates/collab/migrations/20240730182554_add_processed_stripe_events.sql deleted file mode 100644 index baf1aa3122e8009a705708e9b8880f5eba5f36f5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240730182554_add_processed_stripe_events.sql +++ /dev/null @@ -1,11 +0,0 @@ -ALTER TABLE billing_customers DROP COLUMN last_stripe_event_id; -ALTER TABLE billing_subscriptions DROP COLUMN last_stripe_event_id; - -CREATE TABLE IF NOT EXISTS processed_stripe_events ( - stripe_event_id TEXT PRIMARY KEY, - stripe_event_type TEXT NOT NULL, - stripe_event_created_timestamp BIGINT NOT NULL, - processed_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now() -); - -CREATE INDEX "ix_processed_stripe_events_on_stripe_event_created_timestamp" ON processed_stripe_events (stripe_event_created_timestamp); diff --git a/crates/collab/migrations/20240731120800_add_stripe_cancel_at_to_billing_subscriptions.sql b/crates/collab/migrations/20240731120800_add_stripe_cancel_at_to_billing_subscriptions.sql deleted file mode 100644 index b09640bb1eabafda7b9eef9d0763db31d78d1e96..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240731120800_add_stripe_cancel_at_to_billing_subscriptions.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE billing_subscriptions ADD COLUMN stripe_cancel_at TIMESTAMP WITHOUT TIME ZONE; diff --git a/crates/collab/migrations/20240812073542_add_accepted_tos_at.sql b/crates/collab/migrations/20240812073542_add_accepted_tos_at.sql deleted file mode 100644 index 43fa0e7bbdcbfe1c0f05a5ae3a74966dcecd7f1b..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240812073542_add_accepted_tos_at.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE users ADD accepted_tos_at TIMESTAMP WITHOUT TIME ZONE; diff --git a/crates/collab/migrations/20240812204045_add_github_user_created_at_to_users.sql b/crates/collab/migrations/20240812204045_add_github_user_created_at_to_users.sql deleted file mode 100644 index a5f713ef7c489f9b87ea9c3b47345903f1e4b5b5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240812204045_add_github_user_created_at_to_users.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE "users" ADD COLUMN "github_user_created_at" TIMESTAMP WITHOUT TIME ZONE; diff --git 
a/crates/collab/migrations/20240816181658_add_enabled_for_all_to_feature_flags.sql b/crates/collab/migrations/20240816181658_add_enabled_for_all_to_feature_flags.sql deleted file mode 100644 index a56c87b97a41de260ccb4aa7d44fd35d2c026293..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240816181658_add_enabled_for_all_to_feature_flags.sql +++ /dev/null @@ -1 +0,0 @@ -alter table feature_flags add column enabled_for_all boolean not null default false; diff --git a/crates/collab/migrations/20240822215737_add_unique_constraint_on_github_user_id_on_users.sql b/crates/collab/migrations/20240822215737_add_unique_constraint_on_github_user_id_on_users.sql deleted file mode 100644 index 3b418f7e2669adfed83919605387d2c613b7f01a..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240822215737_add_unique_constraint_on_github_user_id_on_users.sql +++ /dev/null @@ -1,4 +0,0 @@ -alter table users alter column github_user_id set not null; - -drop index index_users_on_github_user_id; -create unique index uix_users_on_github_user_id on users (github_user_id); diff --git a/crates/collab/migrations/20240823155956_add_is_fifo_to_worktree_entries.sql b/crates/collab/migrations/20240823155956_add_is_fifo_to_worktree_entries.sql deleted file mode 100644 index af6fdac19d2498e990605d9851cb690dff41e830..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20240823155956_add_is_fifo_to_worktree_entries.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "worktree_entries" -ADD "is_fifo" BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20241002120231_add_local_settings_kind.sql b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql deleted file mode 100644 index aec4ffb8f8519b3fb30c90db4d9bd1221237d7c7..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20241002120231_add_local_settings_kind.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE "worktree_settings_files" ADD COLUMN "kind" VARCHAR; diff --git a/crates/collab/migrations/20241009190639_add_billing_preferences.sql b/crates/collab/migrations/20241009190639_add_billing_preferences.sql deleted file mode 100644 index 9aa5a1a303668eac7032555f0ea04c6c34b1718f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20241009190639_add_billing_preferences.sql +++ /dev/null @@ -1,8 +0,0 @@ -create table if not exists billing_preferences ( - id serial primary key, - created_at timestamp without time zone not null default now(), - user_id integer not null references users(id) on delete cascade, - max_monthly_llm_usage_spending_in_cents integer not null -); - -create unique index "uix_billing_preferences_on_user_id" on billing_preferences (user_id); diff --git a/crates/collab/migrations/20241019184824_adjust_symlink_data.sql b/crates/collab/migrations/20241019184824_adjust_symlink_data.sql deleted file mode 100644 index a38dd21cde85e6ac48c2e45afd5397a81952e712..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20241019184824_adjust_symlink_data.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE worktree_entries ADD COLUMN canonical_path text; -ALTER TABLE worktree_entries ALTER COLUMN is_symlink SET DEFAULT false; diff --git a/crates/collab/migrations/20241021202606_add_custom_llm_monthly_allowance_in_cents_to_users.sql b/crates/collab/migrations/20241021202606_add_custom_llm_monthly_allowance_in_cents_to_users.sql deleted file mode 100644 index 60a9bfa91074b4bff08e2cd686e6203beb6b2cf4..0000000000000000000000000000000000000000 
--- a/crates/collab/migrations/20241021202606_add_custom_llm_monthly_allowance_in_cents_to_users.sql +++ /dev/null @@ -1 +0,0 @@ -alter table users add column custom_llm_monthly_allowance_in_cents integer; diff --git a/crates/collab/migrations/20241023201725_remove_dev_servers.sql b/crates/collab/migrations/20241023201725_remove_dev_servers.sql deleted file mode 100644 index c5da673a29b1e08b371c712c2786676829f3c25f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20241023201725_remove_dev_servers.sql +++ /dev/null @@ -1,6 +0,0 @@ -ALTER TABLE projects DROP COLUMN dev_server_project_id; -ALTER TABLE projects DROP COLUMN hosted_project_id; - -DROP TABLE hosted_projects; -DROP TABLE dev_server_projects; -DROP TABLE dev_servers; diff --git a/crates/collab/migrations/20241121185750_add_breakpoints.sql b/crates/collab/migrations/20241121185750_add_breakpoints.sql deleted file mode 100644 index 4b3071457392f433959ce8270b4dd91f6b99bb78..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20241121185750_add_breakpoints.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE IF NOT EXISTS "breakpoints" ( - "id" SERIAL PRIMARY KEY, - "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, - "position" INTEGER NOT NULL, - "log_message" TEXT NULL, - "worktree_id" BIGINT NOT NULL, - "path" TEXT NOT NULL, - "kind" VARCHAR NOT NULL -); - -CREATE INDEX "index_breakpoints_on_project_id" ON "breakpoints" ("project_id"); diff --git a/crates/collab/migrations/20250108184547_add_stripe_cancellation_reason_to_billing_subscriptions.sql b/crates/collab/migrations/20250108184547_add_stripe_cancellation_reason_to_billing_subscriptions.sql deleted file mode 100644 index 31686f56bbc06e21797e5389664bf2b2850a6263..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250108184547_add_stripe_cancellation_reason_to_billing_subscriptions.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_subscriptions -add column stripe_cancellation_reason text; diff --git a/crates/collab/migrations/20250113230049_expand_git_status_information.sql b/crates/collab/migrations/20250113230049_expand_git_status_information.sql deleted file mode 100644 index eada39fe304020556bd39dedf640a890144ae5dd..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250113230049_expand_git_status_information.sql +++ /dev/null @@ -1,13 +0,0 @@ -ALTER TABLE worktree_repository_statuses -ADD COLUMN status_kind INTEGER, -ADD COLUMN first_status INTEGER, -ADD COLUMN second_status INTEGER; - -UPDATE worktree_repository_statuses -SET - status_kind = 0; - -ALTER TABLE worktree_repository_statuses -ALTER COLUMN status_kind -SET - NOT NULL; diff --git a/crates/collab/migrations/20250117100620_add_user_name.sql b/crates/collab/migrations/20250117100620_add_user_name.sql deleted file mode 100644 index fff7f95b6052c54c47bcc0c40ef49c65e35b95b4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250117100620_add_user_name.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE users ADD COLUMN name TEXT; diff --git a/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql b/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql deleted file mode 100644 index 07c40303994395e8f43c33df130955e0d82ab627..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250204224004_add_has_overdue_invoices_to_billing_customers.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_customers 
-add column has_overdue_invoices bool not null default false; diff --git a/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql b/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql deleted file mode 100644 index 50dcb0508f35cc69a87bbcbf83fed9809f539b41..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250205192813_add_provides_fields_to_extension_versions.sql +++ /dev/null @@ -1,10 +0,0 @@ -alter table extension_versions -add column provides_themes bool not null default false, -add column provides_icon_themes bool not null default false, -add column provides_languages bool not null default false, -add column provides_grammars bool not null default false, -add column provides_language_servers bool not null default false, -add column provides_context_servers bool not null default false, -add column provides_slash_commands bool not null default false, -add column provides_indexed_docs_providers bool not null default false, -add column provides_snippets bool not null default false; diff --git a/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql b/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql deleted file mode 100644 index e6e0770bba8cbbb7649689705c526ead9629518d..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250205232017_add_conflicts_to_repositories.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE worktree_repositories -ADD COLUMN current_merge_conflicts VARCHAR NULL; diff --git a/crates/collab/migrations/20250210223746_add_branch_summary.sql b/crates/collab/migrations/20250210223746_add_branch_summary.sql deleted file mode 100644 index 3294f38b94114a73713b6282d401b97fcdc383e5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250210223746_add_branch_summary.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE worktree_repositories -ADD COLUMN worktree_repositories VARCHAR NULL; diff --git a/crates/collab/migrations/20250212060936_add_worktree_branch_summary.sql b/crates/collab/migrations/20250212060936_add_worktree_branch_summary.sql deleted file mode 100644 index d7e3c04e2ff7844ed8d47907b0d21b64ae7d9a1c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250212060936_add_worktree_branch_summary.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE worktree_repositories ADD COLUMN branch_summary TEXT NULL; diff --git a/crates/collab/migrations/20250319182812_create_project_repositories.sql b/crates/collab/migrations/20250319182812_create_project_repositories.sql deleted file mode 100644 index 8ca8c3444e60ccc4105e01e7a0d035930d57da4d..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250319182812_create_project_repositories.sql +++ /dev/null @@ -1,32 +0,0 @@ -CREATE TABLE "project_repositories" ( - "project_id" INTEGER NOT NULL, - "abs_path" VARCHAR, - "id" INT8 NOT NULL, - "legacy_worktree_id" INT8, - "entry_ids" VARCHAR, - "branch" VARCHAR, - "scan_id" INT8 NOT NULL, - "is_deleted" BOOL NOT NULL, - "current_merge_conflicts" VARCHAR, - "branch_summary" VARCHAR, - PRIMARY KEY (project_id, id) -); - -CREATE INDEX "index_project_repositories_on_project_id" ON "project_repositories" ("project_id"); - -CREATE TABLE "project_repository_statuses" ( - "project_id" INTEGER NOT NULL, - "repository_id" INT8 NOT NULL, - "repo_path" VARCHAR NOT NULL, - "status" INT8 NOT NULL, - "status_kind" INT4 NOT NULL, - "first_status" INT4 NULL, - "second_status" INT4 NULL, - "scan_id" INT8 NOT 
NULL, - "is_deleted" BOOL NOT NULL, - PRIMARY KEY (project_id, repository_id, repo_path) -); - -CREATE INDEX "index_project_repos_statuses_on_project_id" ON "project_repository_statuses" ("project_id"); - -CREATE INDEX "index_project_repos_statuses_on_project_id_and_repo_id" ON "project_repository_statuses" ("project_id", "repository_id"); diff --git a/crates/collab/migrations/20250415164141_add_kind_and_period_to_billing_subscriptions.sql b/crates/collab/migrations/20250415164141_add_kind_and_period_to_billing_subscriptions.sql deleted file mode 100644 index b91431b28ba2ed2ce9fcf13a72938aad46330f04..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250415164141_add_kind_and_period_to_billing_subscriptions.sql +++ /dev/null @@ -1,4 +0,0 @@ -alter table billing_subscriptions - add column kind text, - add column stripe_current_period_start bigint, - add column stripe_current_period_end bigint; diff --git a/crates/collab/migrations/20250422194500_add_trial_started_at_to_billing_customers.sql b/crates/collab/migrations/20250422194500_add_trial_started_at_to_billing_customers.sql deleted file mode 100644 index 34a159cf65a2a92aa21b75daa62c9a0d91023bcc..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250422194500_add_trial_started_at_to_billing_customers.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_customers - add column trial_started_at timestamp without time zone; diff --git a/crates/collab/migrations/20250423150129_add_head_commit_details_to_project_repositories.sql b/crates/collab/migrations/20250423150129_add_head_commit_details_to_project_repositories.sql deleted file mode 100644 index c37fed224229d99403868f462a23fe811b27fda6..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250423150129_add_head_commit_details_to_project_repositories.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table project_repositories - add column head_commit_details varchar; diff --git a/crates/collab/migrations/20250425201930_add_model_request_overages_to_billing_preferences.sql b/crates/collab/migrations/20250425201930_add_model_request_overages_to_billing_preferences.sql deleted file mode 100644 index 86e35c9202b9184796a8c3d2463f20f89766ec8c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250425201930_add_model_request_overages_to_billing_preferences.sql +++ /dev/null @@ -1,3 +0,0 @@ -alter table billing_preferences - add column model_request_overages_enabled bool not null default false, - add column model_request_overages_spend_limit_in_cents integer not null default 0; diff --git a/crates/collab/migrations/20250530175450_add_channel_order.sql b/crates/collab/migrations/20250530175450_add_channel_order.sql deleted file mode 100644 index 977a4611cdb75d0e53c8d1c132290f9da7469dc5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250530175450_add_channel_order.sql +++ /dev/null @@ -1,16 +0,0 @@ --- Add channel_order column to channels table with default value -ALTER TABLE channels ADD COLUMN channel_order INTEGER NOT NULL DEFAULT 1; - --- Update channel_order for existing channels using ROW_NUMBER for deterministic ordering -UPDATE channels -SET channel_order = ( - SELECT ROW_NUMBER() OVER ( - PARTITION BY parent_path - ORDER BY name, id - ) - FROM channels c2 - WHERE c2.id = channels.id -); - --- Create index for efficient ordering queries -CREATE INDEX "index_channels_on_parent_path_and_order" ON "channels" ("parent_path", "channel_order"); diff --git 
a/crates/collab/migrations/20250612153105_add_collaborator_commit_email.sql b/crates/collab/migrations/20250612153105_add_collaborator_commit_email.sql deleted file mode 100644 index 73876e89652deea8bbebf354b99cb2d792894130..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250612153105_add_collaborator_commit_email.sql +++ /dev/null @@ -1,4 +0,0 @@ -alter table project_collaborators - add column committer_name varchar; -alter table project_collaborators - add column committer_email varchar; diff --git a/crates/collab/migrations/20250617082236_add_debug_adapter_provides_field_to_extensions.sql b/crates/collab/migrations/20250617082236_add_debug_adapter_provides_field_to_extensions.sql deleted file mode 100644 index 8455a82f9ee6b5fdf8cfba3da08c880018061a43..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250617082236_add_debug_adapter_provides_field_to_extensions.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table extension_versions -add column provides_debug_adapters bool not null default false diff --git a/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql b/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql deleted file mode 100644 index 3c399924b96891d490792fb36b61a034f8dce97f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table extension_versions -add column provides_agent_servers bool not null default false diff --git a/crates/collab/migrations/20250702185129_add_cascading_delete_to_repository_entries.sql b/crates/collab/migrations/20250702185129_add_cascading_delete_to_repository_entries.sql deleted file mode 100644 index 6d898c481199f4770ab7df5ce66c08e2fdf42423..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250702185129_add_cascading_delete_to_repository_entries.sql +++ /dev/null @@ -1,25 +0,0 @@ -DELETE FROM project_repositories -WHERE project_id NOT IN (SELECT id FROM projects); - -ALTER TABLE project_repositories - ADD CONSTRAINT fk_project_repositories_project_id - FOREIGN KEY (project_id) - REFERENCES projects (id) - ON DELETE CASCADE - NOT VALID; - -ALTER TABLE project_repositories - VALIDATE CONSTRAINT fk_project_repositories_project_id; - -DELETE FROM project_repository_statuses -WHERE project_id NOT IN (SELECT id FROM projects); - -ALTER TABLE project_repository_statuses - ADD CONSTRAINT fk_project_repository_statuses_project_id - FOREIGN KEY (project_id) - REFERENCES projects (id) - ON DELETE CASCADE - NOT VALID; - -ALTER TABLE project_repository_statuses - VALIDATE CONSTRAINT fk_project_repository_statuses_project_id; diff --git a/crates/collab/migrations/20250707182700_add_access_tokens_cascade_delete_on_user.sql b/crates/collab/migrations/20250707182700_add_access_tokens_cascade_delete_on_user.sql deleted file mode 100644 index ae0ffe24f6322196358225ff4159df9d1cfa6298..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250707182700_add_access_tokens_cascade_delete_on_user.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE access_tokens DROP CONSTRAINT access_tokens_user_id_fkey; -ALTER TABLE access_tokens ADD CONSTRAINT access_tokens_user_id_fkey - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; diff --git a/crates/collab/migrations/20250804080620_language_server_capabilities.sql b/crates/collab/migrations/20250804080620_language_server_capabilities.sql deleted 
file mode 100644 index f74f094ed25d488720f2f85f30b6762f83647b02..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250804080620_language_server_capabilities.sql +++ /dev/null @@ -1,5 +0,0 @@ -ALTER TABLE language_servers - ADD COLUMN capabilities TEXT NOT NULL DEFAULT '{}'; - -ALTER TABLE language_servers - ALTER COLUMN capabilities DROP DEFAULT; diff --git a/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql b/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql deleted file mode 100644 index e372723d6d5f5e822a2e437cfac4b95bc2023998..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250816124707_make_admin_required_on_users.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table users -alter column admin set not null; diff --git a/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql b/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql deleted file mode 100644 index ea5e4de52a829413030bb5e206f5c7401381adcf..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250816133027_add_orb_customer_id_to_billing_customers.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_customers - add column orb_customer_id text; diff --git a/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql b/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql deleted file mode 100644 index f51a33ed30d7fb88bc9dc6c82e7217c7e4634b28..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250816135346_drop_rate_buckets_table.sql +++ /dev/null @@ -1 +0,0 @@ -drop table rate_buckets; diff --git a/crates/collab/migrations/20250818192156_add_git_merge_message.sql b/crates/collab/migrations/20250818192156_add_git_merge_message.sql deleted file mode 100644 index 335ea2f82493082e0e20d7762b5282696dc50224..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250818192156_add_git_merge_message.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE "project_repositories" ADD COLUMN "merge_message" VARCHAR; diff --git a/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql b/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql deleted file mode 100644 index 317f6a7653e3d1762f74e795a17d2f99b3831201..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250819022421_add_orb_subscription_id_to_billing_subscriptions.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_subscriptions - add column orb_subscription_id text; diff --git a/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql b/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql deleted file mode 100644 index cf3b79da60be98da8dd78a2bcb01f7532be7fc59..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250819225916_make_stripe_fields_optional_on_billing_subscription.sql +++ /dev/null @@ -1,3 +0,0 @@ -alter table billing_subscriptions - alter column stripe_subscription_id drop not null, - alter column stripe_subscription_status drop not null; diff --git a/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql b/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql deleted file mode 100644 index 
89a42ab82bd97f487a426ef1fa0a08aa5b0c8396..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql +++ /dev/null @@ -1,4 +0,0 @@ -alter table billing_subscriptions - add column orb_subscription_status text, - add column orb_current_billing_period_start_date timestamp without time zone, - add column orb_current_billing_period_end_date timestamp without time zone; diff --git a/crates/collab/migrations/20250827084812_worktree_in_servers.sql b/crates/collab/migrations/20250827084812_worktree_in_servers.sql deleted file mode 100644 index d4c6ffbbcccb2d2f23654cfc287b45bb8ea20508..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250827084812_worktree_in_servers.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE language_servers - ADD COLUMN worktree_id BIGINT; diff --git a/crates/collab/migrations/20250913035238_add_orb_cancellation_date_to_billing_subscriptions.sql b/crates/collab/migrations/20250913035238_add_orb_cancellation_date_to_billing_subscriptions.sql deleted file mode 100644 index 56144237421d49fa68545f9689bdb1688603739a..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250913035238_add_orb_cancellation_date_to_billing_subscriptions.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_subscriptions - add column orb_cancellation_date timestamp without time zone; diff --git a/crates/collab/migrations/20250914022147_add_orb_portal_url_to_billing_customers.sql b/crates/collab/migrations/20250914022147_add_orb_portal_url_to_billing_customers.sql deleted file mode 100644 index 2de05740410f5d13f7cd510a85af24dd2ff171b6..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250914022147_add_orb_portal_url_to_billing_customers.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter table billing_customers - add column orb_portal_url text; diff --git a/crates/collab/migrations/20250916173002_add_path_style_to_project.sql b/crates/collab/migrations/20250916173002_add_path_style_to_project.sql deleted file mode 100644 index b1244818f14403d38af577be4b14b1a8a765e07b..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20250916173002_add_path_style_to_project.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE projects ADD COLUMN windows_paths BOOLEAN DEFAULT FALSE; diff --git a/crates/collab/migrations/20251002214229_add_token_spend_in_cents_to_billing_subscriptions.sql b/crates/collab/migrations/20251002214229_add_token_spend_in_cents_to_billing_subscriptions.sql deleted file mode 100644 index ccae01e2833fedd530f290c55d2852c33de6957c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20251002214229_add_token_spend_in_cents_to_billing_subscriptions.sql +++ /dev/null @@ -1,3 +0,0 @@ -alter table billing_subscriptions - add column token_spend_in_cents integer, - add column token_spend_in_cents_updated_at timestamp without time zone; diff --git a/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql b/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql deleted file mode 100644 index 5b4207aeea500595c66508fa88a20662bc5693c1..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE "worktree_entries" -ADD "is_hidden" BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/migrations/20251110214057_drop_channel_messages.sql 
b/crates/collab/migrations/20251110214057_drop_channel_messages.sql deleted file mode 100644 index 468534542fbb7cee04aee985bfe2143f30d219ad..0000000000000000000000000000000000000000 --- a/crates/collab/migrations/20251110214057_drop_channel_messages.sql +++ /dev/null @@ -1,3 +0,0 @@ -drop table observed_channel_messages; -drop table channel_message_mentions; -drop table channel_messages; diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql new file mode 100644 index 0000000000000000000000000000000000000000..ed9c9d16dbdf2fbe2e69134f407ee5365236161b --- /dev/null +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -0,0 +1,899 @@ +CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public; + +CREATE TABLE public.access_tokens ( + id integer NOT NULL, + user_id integer, + hash character varying(128), + impersonated_user_id integer +); + +CREATE SEQUENCE public.access_tokens_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.access_tokens_id_seq OWNED BY public.access_tokens.id; + +CREATE TABLE public.breakpoints ( + id integer NOT NULL, + project_id integer NOT NULL, + "position" integer NOT NULL, + log_message text, + worktree_id bigint NOT NULL, + path text NOT NULL, + kind character varying NOT NULL +); + +CREATE SEQUENCE public.breakpoints_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.breakpoints_id_seq OWNED BY public.breakpoints.id; + +CREATE TABLE public.buffer_operations ( + buffer_id integer NOT NULL, + epoch integer NOT NULL, + replica_id integer NOT NULL, + lamport_timestamp integer NOT NULL, + value bytea NOT NULL +); + +CREATE TABLE public.buffer_snapshots ( + buffer_id integer NOT NULL, + epoch integer NOT NULL, + text text NOT NULL, + operation_serialization_version integer NOT NULL +); + +CREATE TABLE public.buffers ( + id integer NOT NULL, + channel_id integer NOT NULL, + epoch integer DEFAULT 0 NOT NULL, + latest_operation_epoch integer, + latest_operation_lamport_timestamp integer, + latest_operation_replica_id integer +); + +CREATE SEQUENCE public.buffers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.buffers_id_seq OWNED BY public.buffers.id; + +CREATE TABLE public.channel_buffer_collaborators ( + id integer NOT NULL, + channel_id integer NOT NULL, + connection_id integer NOT NULL, + connection_server_id integer NOT NULL, + connection_lost boolean DEFAULT false NOT NULL, + user_id integer NOT NULL, + replica_id integer NOT NULL +); + +CREATE SEQUENCE public.channel_buffer_collaborators_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.channel_buffer_collaborators_id_seq OWNED BY public.channel_buffer_collaborators.id; + +CREATE TABLE public.channel_chat_participants ( + id integer NOT NULL, + user_id integer NOT NULL, + channel_id integer NOT NULL, + connection_id integer NOT NULL, + connection_server_id integer NOT NULL +); + +CREATE SEQUENCE public.channel_chat_participants_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.channel_chat_participants_id_seq OWNED BY public.channel_chat_participants.id; + +CREATE TABLE public.channel_members ( + id integer NOT NULL, + channel_id integer NOT NULL, + user_id integer NOT NULL, + accepted boolean DEFAULT false NOT 
NULL, + updated_at timestamp without time zone DEFAULT now() NOT NULL, + role text NOT NULL +); + +CREATE SEQUENCE public.channel_members_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.channel_members_id_seq OWNED BY public.channel_members.id; + +CREATE TABLE public.channels ( + id integer NOT NULL, + name character varying NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + visibility text DEFAULT 'members'::text NOT NULL, + parent_path text NOT NULL, + requires_zed_cla boolean DEFAULT false NOT NULL, + channel_order integer DEFAULT 1 NOT NULL +); + +CREATE SEQUENCE public.channels_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.channels_id_seq OWNED BY public.channels.id; + +CREATE TABLE public.contacts ( + id integer NOT NULL, + user_id_a integer NOT NULL, + user_id_b integer NOT NULL, + a_to_b boolean NOT NULL, + should_notify boolean NOT NULL, + accepted boolean NOT NULL +); + +CREATE SEQUENCE public.contacts_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.contacts_id_seq OWNED BY public.contacts.id; + +CREATE TABLE public.contributors ( + user_id integer NOT NULL, + signed_at timestamp without time zone DEFAULT now() NOT NULL +); + +CREATE TABLE public.extension_versions ( + extension_id integer NOT NULL, + version text NOT NULL, + published_at timestamp without time zone DEFAULT now() NOT NULL, + authors text NOT NULL, + repository text NOT NULL, + description text NOT NULL, + download_count bigint DEFAULT 0 NOT NULL, + schema_version integer DEFAULT 0 NOT NULL, + wasm_api_version text, + provides_themes boolean DEFAULT false NOT NULL, + provides_icon_themes boolean DEFAULT false NOT NULL, + provides_languages boolean DEFAULT false NOT NULL, + provides_grammars boolean DEFAULT false NOT NULL, + provides_language_servers boolean DEFAULT false NOT NULL, + provides_context_servers boolean DEFAULT false NOT NULL, + provides_slash_commands boolean DEFAULT false NOT NULL, + provides_indexed_docs_providers boolean DEFAULT false NOT NULL, + provides_snippets boolean DEFAULT false NOT NULL, + provides_debug_adapters boolean DEFAULT false NOT NULL, + provides_agent_servers boolean DEFAULT false NOT NULL +); + +CREATE TABLE public.extensions ( + id integer NOT NULL, + name text NOT NULL, + external_id text NOT NULL, + latest_version text NOT NULL, + total_download_count bigint DEFAULT 0 NOT NULL +); + +CREATE SEQUENCE public.extensions_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.extensions_id_seq OWNED BY public.extensions.id; + +CREATE TABLE public.feature_flags ( + id integer NOT NULL, + flag character varying(255) NOT NULL, + enabled_for_all boolean DEFAULT false NOT NULL +); + +CREATE SEQUENCE public.feature_flags_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.feature_flags_id_seq OWNED BY public.feature_flags.id; + +CREATE TABLE public.followers ( + id integer NOT NULL, + room_id integer NOT NULL, + project_id integer NOT NULL, + leader_connection_server_id integer NOT NULL, + leader_connection_id integer NOT NULL, + follower_connection_server_id integer NOT NULL, + follower_connection_id integer NOT NULL +); + +CREATE SEQUENCE public.followers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + 
CACHE 1; + +ALTER SEQUENCE public.followers_id_seq OWNED BY public.followers.id; + +CREATE TABLE public.language_servers ( + project_id integer NOT NULL, + id bigint NOT NULL, + name character varying NOT NULL, + capabilities text NOT NULL, + worktree_id bigint +); + +CREATE TABLE public.notification_kinds ( + id integer NOT NULL, + name character varying NOT NULL +); + +CREATE SEQUENCE public.notification_kinds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.notification_kinds_id_seq OWNED BY public.notification_kinds.id; + +CREATE TABLE public.notifications ( + id integer NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + recipient_id integer NOT NULL, + kind integer NOT NULL, + entity_id integer, + content text, + is_read boolean DEFAULT false NOT NULL, + response boolean +); + +CREATE SEQUENCE public.notifications_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.notifications_id_seq OWNED BY public.notifications.id; + +CREATE TABLE public.observed_buffer_edits ( + user_id integer NOT NULL, + buffer_id integer NOT NULL, + epoch integer NOT NULL, + lamport_timestamp integer NOT NULL, + replica_id integer NOT NULL +); + +CREATE TABLE public.project_collaborators ( + id integer NOT NULL, + project_id integer NOT NULL, + connection_id integer NOT NULL, + user_id integer NOT NULL, + replica_id integer NOT NULL, + is_host boolean NOT NULL, + connection_server_id integer NOT NULL, + committer_name character varying, + committer_email character varying +); + +CREATE SEQUENCE public.project_collaborators_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.project_collaborators_id_seq OWNED BY public.project_collaborators.id; + +CREATE TABLE public.project_repositories ( + project_id integer NOT NULL, + abs_path character varying, + id bigint NOT NULL, + legacy_worktree_id bigint, + entry_ids character varying, + branch character varying, + scan_id bigint NOT NULL, + is_deleted boolean NOT NULL, + current_merge_conflicts character varying, + branch_summary character varying, + head_commit_details character varying, + merge_message character varying +); + +CREATE TABLE public.project_repository_statuses ( + project_id integer NOT NULL, + repository_id bigint NOT NULL, + repo_path character varying NOT NULL, + status bigint NOT NULL, + status_kind integer NOT NULL, + first_status integer, + second_status integer, + scan_id bigint NOT NULL, + is_deleted boolean NOT NULL +); + +CREATE TABLE public.projects ( + id integer NOT NULL, + host_user_id integer, + unregistered boolean DEFAULT false NOT NULL, + room_id integer, + host_connection_id integer, + host_connection_server_id integer, + windows_paths boolean DEFAULT false +); + +CREATE SEQUENCE public.projects_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.projects_id_seq OWNED BY public.projects.id; + +CREATE TABLE public.room_participants ( + id integer NOT NULL, + room_id integer NOT NULL, + user_id integer NOT NULL, + answering_connection_id integer, + location_kind integer, + location_project_id integer, + initial_project_id integer, + calling_user_id integer NOT NULL, + calling_connection_id integer NOT NULL, + answering_connection_lost boolean DEFAULT false NOT NULL, + answering_connection_server_id integer, + calling_connection_server_id integer, + 
participant_index integer, + role text +); + +CREATE SEQUENCE public.room_participants_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.room_participants_id_seq OWNED BY public.room_participants.id; + +CREATE TABLE public.rooms ( + id integer NOT NULL, + live_kit_room character varying NOT NULL, + channel_id integer +); + +CREATE SEQUENCE public.rooms_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.rooms_id_seq OWNED BY public.rooms.id; + +CREATE TABLE public.servers ( + id integer NOT NULL, + environment character varying NOT NULL +); + +CREATE SEQUENCE public.servers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.servers_id_seq OWNED BY public.servers.id; + +CREATE TABLE public.user_features ( + user_id integer NOT NULL, + feature_id integer NOT NULL +); + +CREATE TABLE public.users ( + id integer NOT NULL, + github_login character varying, + admin boolean NOT NULL, + email_address character varying(255) DEFAULT NULL::character varying, + invite_code character varying(64), + invite_count integer DEFAULT 0 NOT NULL, + inviter_id integer, + connected_once boolean DEFAULT false NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + github_user_id integer NOT NULL, + metrics_id uuid DEFAULT gen_random_uuid() NOT NULL, + accepted_tos_at timestamp without time zone, + github_user_created_at timestamp without time zone, + custom_llm_monthly_allowance_in_cents integer, + name text +); + +CREATE SEQUENCE public.users_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + +CREATE TABLE public.worktree_diagnostic_summaries ( + project_id integer NOT NULL, + worktree_id bigint NOT NULL, + path character varying NOT NULL, + language_server_id bigint NOT NULL, + error_count integer NOT NULL, + warning_count integer NOT NULL +); + +CREATE TABLE public.worktree_entries ( + project_id integer NOT NULL, + worktree_id bigint NOT NULL, + id bigint NOT NULL, + is_dir boolean NOT NULL, + path character varying NOT NULL, + inode bigint NOT NULL, + mtime_seconds bigint NOT NULL, + mtime_nanos integer NOT NULL, + is_symlink boolean DEFAULT false NOT NULL, + is_ignored boolean NOT NULL, + scan_id bigint, + is_deleted boolean, + git_status bigint, + is_external boolean DEFAULT false NOT NULL, + is_fifo boolean DEFAULT false NOT NULL, + canonical_path text, + is_hidden boolean DEFAULT false NOT NULL +); + +CREATE TABLE public.worktree_settings_files ( + project_id integer NOT NULL, + worktree_id bigint NOT NULL, + path character varying NOT NULL, + content text NOT NULL, + kind character varying +); + +CREATE TABLE public.worktrees ( + project_id integer NOT NULL, + id bigint NOT NULL, + root_name character varying NOT NULL, + abs_path character varying NOT NULL, + visible boolean NOT NULL, + scan_id bigint NOT NULL, + is_complete boolean DEFAULT false NOT NULL, + completed_scan_id bigint +); + +ALTER TABLE ONLY public.access_tokens ALTER COLUMN id SET DEFAULT nextval('public.access_tokens_id_seq'::regclass); + +ALTER TABLE ONLY public.breakpoints ALTER COLUMN id SET DEFAULT nextval('public.breakpoints_id_seq'::regclass); + +ALTER TABLE ONLY public.buffers ALTER COLUMN id SET DEFAULT nextval('public.buffers_id_seq'::regclass); + +ALTER TABLE ONLY public.channel_buffer_collaborators ALTER 
COLUMN id SET DEFAULT nextval('public.channel_buffer_collaborators_id_seq'::regclass); + +ALTER TABLE ONLY public.channel_chat_participants ALTER COLUMN id SET DEFAULT nextval('public.channel_chat_participants_id_seq'::regclass); + +ALTER TABLE ONLY public.channel_members ALTER COLUMN id SET DEFAULT nextval('public.channel_members_id_seq'::regclass); + +ALTER TABLE ONLY public.channels ALTER COLUMN id SET DEFAULT nextval('public.channels_id_seq'::regclass); + +ALTER TABLE ONLY public.contacts ALTER COLUMN id SET DEFAULT nextval('public.contacts_id_seq'::regclass); + +ALTER TABLE ONLY public.extensions ALTER COLUMN id SET DEFAULT nextval('public.extensions_id_seq'::regclass); + +ALTER TABLE ONLY public.feature_flags ALTER COLUMN id SET DEFAULT nextval('public.feature_flags_id_seq'::regclass); + +ALTER TABLE ONLY public.followers ALTER COLUMN id SET DEFAULT nextval('public.followers_id_seq'::regclass); + +ALTER TABLE ONLY public.notification_kinds ALTER COLUMN id SET DEFAULT nextval('public.notification_kinds_id_seq'::regclass); + +ALTER TABLE ONLY public.notifications ALTER COLUMN id SET DEFAULT nextval('public.notifications_id_seq'::regclass); + +ALTER TABLE ONLY public.project_collaborators ALTER COLUMN id SET DEFAULT nextval('public.project_collaborators_id_seq'::regclass); + +ALTER TABLE ONLY public.projects ALTER COLUMN id SET DEFAULT nextval('public.projects_id_seq'::regclass); + +ALTER TABLE ONLY public.room_participants ALTER COLUMN id SET DEFAULT nextval('public.room_participants_id_seq'::regclass); + +ALTER TABLE ONLY public.rooms ALTER COLUMN id SET DEFAULT nextval('public.rooms_id_seq'::regclass); + +ALTER TABLE ONLY public.servers ALTER COLUMN id SET DEFAULT nextval('public.servers_id_seq'::regclass); + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + +ALTER TABLE ONLY public.access_tokens + ADD CONSTRAINT access_tokens_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.breakpoints + ADD CONSTRAINT breakpoints_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.buffer_operations + ADD CONSTRAINT buffer_operations_pkey PRIMARY KEY (buffer_id, epoch, lamport_timestamp, replica_id); + +ALTER TABLE ONLY public.buffer_snapshots + ADD CONSTRAINT buffer_snapshots_pkey PRIMARY KEY (buffer_id, epoch); + +ALTER TABLE ONLY public.buffers + ADD CONSTRAINT buffers_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.channel_buffer_collaborators + ADD CONSTRAINT channel_buffer_collaborators_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.channel_chat_participants + ADD CONSTRAINT channel_chat_participants_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.channel_members + ADD CONSTRAINT channel_members_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.channels + ADD CONSTRAINT channels_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.contacts + ADD CONSTRAINT contacts_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.contributors + ADD CONSTRAINT contributors_pkey PRIMARY KEY (user_id); + +ALTER TABLE ONLY public.extension_versions + ADD CONSTRAINT extension_versions_pkey PRIMARY KEY (extension_id, version); + +ALTER TABLE ONLY public.extensions + ADD CONSTRAINT extensions_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.feature_flags + ADD CONSTRAINT feature_flags_flag_key UNIQUE (flag); + +ALTER TABLE ONLY public.feature_flags + ADD CONSTRAINT feature_flags_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.followers + ADD CONSTRAINT followers_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.language_servers + ADD CONSTRAINT 
language_servers_pkey PRIMARY KEY (project_id, id); + +ALTER TABLE ONLY public.notification_kinds + ADD CONSTRAINT notification_kinds_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.notifications + ADD CONSTRAINT notifications_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.observed_buffer_edits + ADD CONSTRAINT observed_buffer_edits_pkey PRIMARY KEY (user_id, buffer_id); + +ALTER TABLE ONLY public.project_collaborators + ADD CONSTRAINT project_collaborators_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.project_repositories + ADD CONSTRAINT project_repositories_pkey PRIMARY KEY (project_id, id); + +ALTER TABLE ONLY public.project_repository_statuses + ADD CONSTRAINT project_repository_statuses_pkey PRIMARY KEY (project_id, repository_id, repo_path); + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT projects_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.rooms + ADD CONSTRAINT rooms_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.servers + ADD CONSTRAINT servers_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.user_features + ADD CONSTRAINT user_features_pkey PRIMARY KEY (user_id, feature_id); + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.worktree_diagnostic_summaries + ADD CONSTRAINT worktree_diagnostic_summaries_pkey PRIMARY KEY (project_id, worktree_id, path); + +ALTER TABLE ONLY public.worktree_entries + ADD CONSTRAINT worktree_entries_pkey PRIMARY KEY (project_id, worktree_id, id); + +ALTER TABLE ONLY public.worktree_settings_files + ADD CONSTRAINT worktree_settings_files_pkey PRIMARY KEY (project_id, worktree_id, path); + +ALTER TABLE ONLY public.worktrees + ADD CONSTRAINT worktrees_pkey PRIMARY KEY (project_id, id); + +CREATE INDEX index_access_tokens_user_id ON public.access_tokens USING btree (user_id); + +CREATE INDEX index_breakpoints_on_project_id ON public.breakpoints USING btree (project_id); + +CREATE INDEX index_buffers_on_channel_id ON public.buffers USING btree (channel_id); + +CREATE INDEX index_channel_buffer_collaborators_on_channel_id ON public.channel_buffer_collaborators USING btree (channel_id); + +CREATE UNIQUE INDEX index_channel_buffer_collaborators_on_channel_id_and_replica_id ON public.channel_buffer_collaborators USING btree (channel_id, replica_id); + +CREATE UNIQUE INDEX index_channel_buffer_collaborators_on_channel_id_connection_id_ ON public.channel_buffer_collaborators USING btree (channel_id, connection_id, connection_server_id); + +CREATE INDEX index_channel_buffer_collaborators_on_connection_id ON public.channel_buffer_collaborators USING btree (connection_id); + +CREATE INDEX index_channel_buffer_collaborators_on_connection_server_id ON public.channel_buffer_collaborators USING btree (connection_server_id); + +CREATE INDEX index_channel_chat_participants_on_channel_id ON public.channel_chat_participants USING btree (channel_id); + +CREATE UNIQUE INDEX index_channel_members_on_channel_id_and_user_id ON public.channel_members USING btree (channel_id, user_id); + +CREATE INDEX index_channels_on_parent_path ON public.channels USING btree (parent_path text_pattern_ops); + +CREATE INDEX index_channels_on_parent_path_and_order ON public.channels USING btree (parent_path, channel_order); + +CREATE INDEX index_contacts_user_id_b ON public.contacts USING btree (user_id_b); + +CREATE UNIQUE INDEX index_contacts_user_ids ON public.contacts USING btree (user_id_a, user_id_b); + +CREATE 
UNIQUE INDEX index_extensions_external_id ON public.extensions USING btree (external_id); + +CREATE INDEX index_extensions_total_download_count ON public.extensions USING btree (total_download_count); + +CREATE UNIQUE INDEX index_feature_flags ON public.feature_flags USING btree (id); + +CREATE UNIQUE INDEX index_followers_on_project_id_and_leader_connection_server_id_a ON public.followers USING btree (project_id, leader_connection_server_id, leader_connection_id, follower_connection_server_id, follower_connection_id); + +CREATE INDEX index_followers_on_room_id ON public.followers USING btree (room_id); + +CREATE UNIQUE INDEX index_invite_code_users ON public.users USING btree (invite_code); + +CREATE INDEX index_language_servers_on_project_id ON public.language_servers USING btree (project_id); + +CREATE UNIQUE INDEX index_notification_kinds_on_name ON public.notification_kinds USING btree (name); + +CREATE INDEX index_notifications_on_recipient_id_is_read_kind_entity_id ON public.notifications USING btree (recipient_id, is_read, kind, entity_id); + +CREATE UNIQUE INDEX index_observed_buffer_user_and_buffer_id ON public.observed_buffer_edits USING btree (user_id, buffer_id); + +CREATE INDEX index_project_collaborators_on_connection_id ON public.project_collaborators USING btree (connection_id); + +CREATE INDEX index_project_collaborators_on_connection_server_id ON public.project_collaborators USING btree (connection_server_id); + +CREATE INDEX index_project_collaborators_on_project_id ON public.project_collaborators USING btree (project_id); + +CREATE UNIQUE INDEX index_project_collaborators_on_project_id_and_replica_id ON public.project_collaborators USING btree (project_id, replica_id); + +CREATE UNIQUE INDEX index_project_collaborators_on_project_id_connection_id_and_ser ON public.project_collaborators USING btree (project_id, connection_id, connection_server_id); + +CREATE INDEX index_project_repos_statuses_on_project_id ON public.project_repository_statuses USING btree (project_id); + +CREATE INDEX index_project_repos_statuses_on_project_id_and_repo_id ON public.project_repository_statuses USING btree (project_id, repository_id); + +CREATE INDEX index_project_repositories_on_project_id ON public.project_repositories USING btree (project_id); + +CREATE INDEX index_projects_on_host_connection_id_and_host_connection_server ON public.projects USING btree (host_connection_id, host_connection_server_id); + +CREATE INDEX index_projects_on_host_connection_server_id ON public.projects USING btree (host_connection_server_id); + +CREATE INDEX index_room_participants_on_answering_connection_id ON public.room_participants USING btree (answering_connection_id); + +CREATE UNIQUE INDEX index_room_participants_on_answering_connection_id_and_answerin ON public.room_participants USING btree (answering_connection_id, answering_connection_server_id); + +CREATE INDEX index_room_participants_on_answering_connection_server_id ON public.room_participants USING btree (answering_connection_server_id); + +CREATE INDEX index_room_participants_on_calling_connection_server_id ON public.room_participants USING btree (calling_connection_server_id); + +CREATE INDEX index_room_participants_on_room_id ON public.room_participants USING btree (room_id); + +CREATE UNIQUE INDEX index_room_participants_on_user_id ON public.room_participants USING btree (user_id); + +CREATE UNIQUE INDEX index_rooms_on_channel_id ON public.rooms USING btree (channel_id); + +CREATE INDEX index_settings_files_on_project_id ON 
public.worktree_settings_files USING btree (project_id); + +CREATE INDEX index_settings_files_on_project_id_and_wt_id ON public.worktree_settings_files USING btree (project_id, worktree_id); + +CREATE INDEX index_user_features_on_feature_id ON public.user_features USING btree (feature_id); + +CREATE INDEX index_user_features_on_user_id ON public.user_features USING btree (user_id); + +CREATE UNIQUE INDEX index_user_features_user_id_and_feature_id ON public.user_features USING btree (user_id, feature_id); + +CREATE UNIQUE INDEX index_users_github_login ON public.users USING btree (github_login); + +CREATE INDEX index_users_on_email_address ON public.users USING btree (email_address); + +CREATE INDEX index_worktree_diagnostic_summaries_on_project_id ON public.worktree_diagnostic_summaries USING btree (project_id); + +CREATE INDEX index_worktree_diagnostic_summaries_on_project_id_and_worktree_ ON public.worktree_diagnostic_summaries USING btree (project_id, worktree_id); + +CREATE INDEX index_worktree_entries_on_project_id ON public.worktree_entries USING btree (project_id); + +CREATE INDEX index_worktree_entries_on_project_id_and_worktree_id ON public.worktree_entries USING btree (project_id, worktree_id); + +CREATE INDEX index_worktrees_on_project_id ON public.worktrees USING btree (project_id); + +CREATE INDEX trigram_index_extensions_name ON public.extensions USING gin (name public.gin_trgm_ops); + +CREATE INDEX trigram_index_users_on_github_login ON public.users USING gin (github_login public.gin_trgm_ops); + +CREATE UNIQUE INDEX uix_channels_parent_path_name ON public.channels USING btree (parent_path, name) WHERE ((parent_path IS NOT NULL) AND (parent_path <> ''::text)); + +CREATE UNIQUE INDEX uix_users_on_github_user_id ON public.users USING btree (github_user_id); + +ALTER TABLE ONLY public.access_tokens + ADD CONSTRAINT access_tokens_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.breakpoints + ADD CONSTRAINT breakpoints_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.buffer_operations + ADD CONSTRAINT buffer_operations_buffer_id_fkey FOREIGN KEY (buffer_id) REFERENCES public.buffers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.buffer_snapshots + ADD CONSTRAINT buffer_snapshots_buffer_id_fkey FOREIGN KEY (buffer_id) REFERENCES public.buffers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.buffers + ADD CONSTRAINT buffers_channel_id_fkey FOREIGN KEY (channel_id) REFERENCES public.channels(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_buffer_collaborators + ADD CONSTRAINT channel_buffer_collaborators_channel_id_fkey FOREIGN KEY (channel_id) REFERENCES public.channels(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_buffer_collaborators + ADD CONSTRAINT channel_buffer_collaborators_connection_server_id_fkey FOREIGN KEY (connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_buffer_collaborators + ADD CONSTRAINT channel_buffer_collaborators_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_chat_participants + ADD CONSTRAINT channel_chat_participants_channel_id_fkey FOREIGN KEY (channel_id) REFERENCES public.channels(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_chat_participants + ADD CONSTRAINT channel_chat_participants_connection_server_id_fkey FOREIGN KEY (connection_server_id) REFERENCES 
public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_chat_participants + ADD CONSTRAINT channel_chat_participants_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + +ALTER TABLE ONLY public.channel_members + ADD CONSTRAINT channel_members_channel_id_fkey FOREIGN KEY (channel_id) REFERENCES public.channels(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.channel_members + ADD CONSTRAINT channel_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.contacts + ADD CONSTRAINT contacts_user_id_a_fkey FOREIGN KEY (user_id_a) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.contacts + ADD CONSTRAINT contacts_user_id_b_fkey FOREIGN KEY (user_id_b) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.contributors + ADD CONSTRAINT contributors_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + +ALTER TABLE ONLY public.extension_versions + ADD CONSTRAINT extension_versions_extension_id_fkey FOREIGN KEY (extension_id) REFERENCES public.extensions(id); + +ALTER TABLE ONLY public.project_repositories + ADD CONSTRAINT fk_project_repositories_project_id FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.project_repository_statuses + ADD CONSTRAINT fk_project_repository_statuses_project_id FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.followers + ADD CONSTRAINT followers_follower_connection_server_id_fkey FOREIGN KEY (follower_connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.followers + ADD CONSTRAINT followers_leader_connection_server_id_fkey FOREIGN KEY (leader_connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.followers + ADD CONSTRAINT followers_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.followers + ADD CONSTRAINT followers_room_id_fkey FOREIGN KEY (room_id) REFERENCES public.rooms(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.language_servers + ADD CONSTRAINT language_servers_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.notifications + ADD CONSTRAINT notifications_kind_fkey FOREIGN KEY (kind) REFERENCES public.notification_kinds(id); + +ALTER TABLE ONLY public.notifications + ADD CONSTRAINT notifications_recipient_id_fkey FOREIGN KEY (recipient_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.observed_buffer_edits + ADD CONSTRAINT observed_buffer_edits_buffer_id_fkey FOREIGN KEY (buffer_id) REFERENCES public.buffers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.observed_buffer_edits + ADD CONSTRAINT observed_buffer_edits_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.project_collaborators + ADD CONSTRAINT project_collaborators_connection_server_id_fkey FOREIGN KEY (connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.project_collaborators + ADD CONSTRAINT project_collaborators_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT projects_host_connection_server_id_fkey FOREIGN KEY (host_connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE 
ONLY public.projects + ADD CONSTRAINT projects_host_user_id_fkey FOREIGN KEY (host_user_id) REFERENCES public.users(id); + +ALTER TABLE ONLY public.projects + ADD CONSTRAINT projects_room_id_fkey FOREIGN KEY (room_id) REFERENCES public.rooms(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_answering_connection_server_id_fkey FOREIGN KEY (answering_connection_server_id) REFERENCES public.servers(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_calling_connection_server_id_fkey FOREIGN KEY (calling_connection_server_id) REFERENCES public.servers(id) ON DELETE SET NULL; + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_calling_user_id_fkey FOREIGN KEY (calling_user_id) REFERENCES public.users(id); + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_room_id_fkey FOREIGN KEY (room_id) REFERENCES public.rooms(id); + +ALTER TABLE ONLY public.room_participants + ADD CONSTRAINT room_participants_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + +ALTER TABLE ONLY public.rooms + ADD CONSTRAINT rooms_channel_id_fkey FOREIGN KEY (channel_id) REFERENCES public.channels(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.user_features + ADD CONSTRAINT user_features_feature_id_fkey FOREIGN KEY (feature_id) REFERENCES public.feature_flags(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.user_features + ADD CONSTRAINT user_features_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_inviter_id_fkey FOREIGN KEY (inviter_id) REFERENCES public.users(id) ON DELETE SET NULL; + +ALTER TABLE ONLY public.worktree_diagnostic_summaries + ADD CONSTRAINT worktree_diagnostic_summaries_project_id_worktree_id_fkey FOREIGN KEY (project_id, worktree_id) REFERENCES public.worktrees(project_id, id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.worktree_entries + ADD CONSTRAINT worktree_entries_project_id_worktree_id_fkey FOREIGN KEY (project_id, worktree_id) REFERENCES public.worktrees(project_id, id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.worktree_settings_files + ADD CONSTRAINT worktree_settings_files_project_id_worktree_id_fkey FOREIGN KEY (project_id, worktree_id) REFERENCES public.worktrees(project_id, id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.worktrees + ADD CONSTRAINT worktrees_project_id_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE; diff --git a/crates/collab/migrations_llm/20240806182921_create_providers_and_models.sql b/crates/collab/migrations_llm/20240806182921_create_providers_and_models.sql deleted file mode 100644 index b81ab7567f22ae750c9aad6357cbf995a75c47c4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240806182921_create_providers_and_models.sql +++ /dev/null @@ -1,19 +0,0 @@ -create table if not exists providers ( - id serial primary key, - name text not null -); - -create unique index uix_providers_on_name on providers (name); - -create table if not exists models ( - id serial primary key, - provider_id integer not null references providers (id) on delete cascade, - name text not null, - max_requests_per_minute integer not null, - max_tokens_per_minute integer not null, - max_tokens_per_day integer not null -); - -create unique index uix_models_on_provider_id_name on models (provider_id, name); -create index ix_models_on_provider_id on models (provider_id); 
-create index ix_models_on_name on models (name); diff --git a/crates/collab/migrations_llm/20240806213401_create_usages.sql b/crates/collab/migrations_llm/20240806213401_create_usages.sql deleted file mode 100644 index da2245d4b9ac20fb6dddcb892221d1ff773cd156..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240806213401_create_usages.sql +++ /dev/null @@ -1,19 +0,0 @@ -create table usage_measures ( - id serial primary key, - name text not null -); - -create unique index uix_usage_measures_on_name on usage_measures (name); - -create table if not exists usages ( - id serial primary key, - user_id integer not null, - model_id integer not null references models (id) on delete cascade, - measure_id integer not null references usage_measures (id) on delete cascade, - timestamp timestamp without time zone not null, - buckets bigint[] not null -); - -create index ix_usages_on_user_id on usages (user_id); -create index ix_usages_on_model_id on usages (model_id); -create unique index uix_usages_on_user_id_model_id_measure_id on usages (user_id, model_id, measure_id); diff --git a/crates/collab/migrations_llm/20240809130000_change_rate_limit_columns_to_bigint.sql b/crates/collab/migrations_llm/20240809130000_change_rate_limit_columns_to_bigint.sql deleted file mode 100644 index f1def8209a742743a6c708365afc05f7ab911e18..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240809130000_change_rate_limit_columns_to_bigint.sql +++ /dev/null @@ -1,4 +0,0 @@ -ALTER TABLE models - ALTER COLUMN max_requests_per_minute TYPE bigint, - ALTER COLUMN max_tokens_per_minute TYPE bigint, - ALTER COLUMN max_tokens_per_day TYPE bigint; diff --git a/crates/collab/migrations_llm/20240809160000_add_pricing_columns_to_models.sql b/crates/collab/migrations_llm/20240809160000_add_pricing_columns_to_models.sql deleted file mode 100644 index d9ffe2f9f29c10acc9aaa6dc48389f92298c404f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240809160000_add_pricing_columns_to_models.sql +++ /dev/null @@ -1,3 +0,0 @@ -ALTER TABLE models - ADD COLUMN price_per_million_input_tokens integer NOT NULL DEFAULT 0, - ADD COLUMN price_per_million_output_tokens integer NOT NULL DEFAULT 0; diff --git a/crates/collab/migrations_llm/20240812184444_add_is_staff_to_usages.sql b/crates/collab/migrations_llm/20240812184444_add_is_staff_to_usages.sql deleted file mode 100644 index a50feb2e3f41cd8e25265049a09140bdb04ea0d3..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240812184444_add_is_staff_to_usages.sql +++ /dev/null @@ -1 +0,0 @@ -alter table usages add column is_staff boolean not null default false; diff --git a/crates/collab/migrations_llm/20240812225346_create_lifetime_usages.sql b/crates/collab/migrations_llm/20240812225346_create_lifetime_usages.sql deleted file mode 100644 index 42047433e564d0f463a11f119ab4950d1d2d1254..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240812225346_create_lifetime_usages.sql +++ /dev/null @@ -1,9 +0,0 @@ -create table lifetime_usages ( - id serial primary key, - user_id integer not null, - model_id integer not null references models (id) on delete cascade, - input_tokens bigint not null default 0, - output_tokens bigint not null default 0 -); - -create unique index uix_lifetime_usages_on_user_id_model_id on lifetime_usages (user_id, model_id); diff --git a/crates/collab/migrations_llm/20240813002237_add_revoked_access_tokens_table.sql 
b/crates/collab/migrations_llm/20240813002237_add_revoked_access_tokens_table.sql deleted file mode 100644 index c30e58a6dd8bccd882a97b55b91e190950890aac..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20240813002237_add_revoked_access_tokens_table.sql +++ /dev/null @@ -1,7 +0,0 @@ -create table revoked_access_tokens ( - id serial primary key, - jti text not null, - revoked_at timestamp without time zone not null default now() -); - -create unique index uix_revoked_access_tokens_on_jti on revoked_access_tokens (jti); diff --git a/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql b/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql deleted file mode 100644 index 855e46ab0224dc4e20e0fb8634a3e678d584433e..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20241007173634_add_cache_token_counts.sql +++ /dev/null @@ -1,11 +0,0 @@ -alter table models - add column price_per_million_cache_creation_input_tokens integer not null default 0, - add column price_per_million_cache_read_input_tokens integer not null default 0; - -alter table usages - add column cache_creation_input_tokens_this_month bigint not null default 0, - add column cache_read_input_tokens_this_month bigint not null default 0; - -alter table lifetime_usages - add column cache_creation_input_tokens bigint not null default 0, - add column cache_read_input_tokens bigint not null default 0; diff --git a/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql b/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql deleted file mode 100644 index c204451b7538d8fefb41e3f2962433b552b91229..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20241007220716_drop_incorrect_usages_columns.sql +++ /dev/null @@ -1,3 +0,0 @@ -alter table usages - drop column cache_creation_input_tokens_this_month, - drop column cache_read_input_tokens_this_month; diff --git a/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql b/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql deleted file mode 100644 index 2733552a3a16f2754ba1c191e42ab4548f67848c..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20241008155620_create_monthly_usages.sql +++ /dev/null @@ -1,13 +0,0 @@ -create table monthly_usages ( - id serial primary key, - user_id integer not null, - model_id integer not null references models (id) on delete cascade, - month integer not null, - year integer not null, - input_tokens bigint not null default 0, - cache_creation_input_tokens bigint not null default 0, - cache_read_input_tokens bigint not null default 0, - output_tokens bigint not null default 0 -); - -create unique index uix_monthly_usages_on_user_id_model_id_month_year on monthly_usages (user_id, model_id, month, year); diff --git a/crates/collab/migrations_llm/20241010151249_create_billing_events.sql b/crates/collab/migrations_llm/20241010151249_create_billing_events.sql deleted file mode 100644 index 74a270872e5f664096d90179359e78a5a2298812..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20241010151249_create_billing_events.sql +++ /dev/null @@ -1,12 +0,0 @@ -create table billing_events ( - id serial primary key, - idempotency_key uuid not null default gen_random_uuid(), - user_id integer not null, - model_id integer not null references models (id) on delete cascade, - input_tokens bigint not null default 0, - input_cache_creation_tokens bigint 
not null default 0, - input_cache_read_tokens bigint not null default 0, - output_tokens bigint not null default 0 -); - -create index uix_billing_events_on_user_id_model_id on billing_events (user_id, model_id); diff --git a/crates/collab/migrations_llm/20250404141155_add_granular_token_limits_to_models.sql b/crates/collab/migrations_llm/20250404141155_add_granular_token_limits_to_models.sql deleted file mode 100644 index e5c50d8385745c602f01b2bb7daa9ba8d3580eaa..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250404141155_add_granular_token_limits_to_models.sql +++ /dev/null @@ -1,3 +0,0 @@ -alter table models - add column max_input_tokens_per_minute bigint not null default 0, - add column max_output_tokens_per_minute bigint not null default 0; diff --git a/crates/collab/migrations_llm/20250415213005_add_subscription_usages.sql b/crates/collab/migrations_llm/20250415213005_add_subscription_usages.sql deleted file mode 100644 index b3873710580b39cdb9c8fe8a2bb176839b5163b4..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250415213005_add_subscription_usages.sql +++ /dev/null @@ -1,10 +0,0 @@ -create table subscription_usages ( - id serial primary key, - user_id integer not null, - period_start_at timestamp without time zone not null, - period_end_at timestamp without time zone not null, - model_requests int not null default 0, - edit_predictions int not null default 0 -); - -create unique index uix_subscription_usages_on_user_id_start_at_end_at on subscription_usages (user_id, period_start_at, period_end_at); diff --git a/crates/collab/migrations_llm/20250416181354_add_plan_to_subscription_usages.sql b/crates/collab/migrations_llm/20250416181354_add_plan_to_subscription_usages.sql deleted file mode 100644 index 8d54c8b87ca820bd8aa46c7bf18ccd50ccf52807..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250416181354_add_plan_to_subscription_usages.sql +++ /dev/null @@ -1,4 +0,0 @@ -alter table subscription_usages - add column plan text not null; - -create index ix_subscription_usages_on_plan on subscription_usages (plan); diff --git a/crates/collab/migrations_llm/20250425171838_add_subscription_usage_meters.sql b/crates/collab/migrations_llm/20250425171838_add_subscription_usage_meters.sql deleted file mode 100644 index ded918e18385dbfe9f916bc0bdb49a3a6bde153f..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250425171838_add_subscription_usage_meters.sql +++ /dev/null @@ -1,8 +0,0 @@ -create table subscription_usage_meters ( - id serial primary key, - subscription_usage_id integer not null references subscription_usages (id) on delete cascade, - model_id integer not null references models (id) on delete cascade, - requests integer not null default 0 -); - -create unique index uix_subscription_usage_meters_on_subscription_usage_model on subscription_usage_meters (subscription_usage_id, model_id); diff --git a/crates/collab/migrations_llm/20250429143553_add_mode_to_subscription_usage_meters.sql b/crates/collab/migrations_llm/20250429143553_add_mode_to_subscription_usage_meters.sql deleted file mode 100644 index 9d63e299f56e0475a4016b2a69a0779f09cc8b4d..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250429143553_add_mode_to_subscription_usage_meters.sql +++ /dev/null @@ -1,6 +0,0 @@ -alter table subscription_usage_meters - add column mode text not null default 'normal'; - -drop index uix_subscription_usage_meters_on_subscription_usage_model; - 
-create unique index uix_subscription_usage_meters_on_subscription_usage_model_mode on subscription_usage_meters (subscription_usage_id, model_id, mode); diff --git a/crates/collab/migrations_llm/20250503162708_add_v2_subscription_usage_and_meter_tables.sql b/crates/collab/migrations_llm/20250503162708_add_v2_subscription_usage_and_meter_tables.sql deleted file mode 100644 index 59169d3c3ec722fd7dfcdde97f1ed3f34e5c51fd..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250503162708_add_v2_subscription_usage_and_meter_tables.sql +++ /dev/null @@ -1,23 +0,0 @@ -create table subscription_usages_v2 ( - id uuid primary key, - user_id integer not null, - period_start_at timestamp without time zone not null, - period_end_at timestamp without time zone not null, - plan text not null, - model_requests int not null default 0, - edit_predictions int not null default 0 -); - -create unique index uix_subscription_usages_v2_on_user_id_start_at_end_at on subscription_usages_v2 (user_id, period_start_at, period_end_at); - -create index ix_subscription_usages_v2_on_plan on subscription_usages_v2 (plan); - -create table subscription_usage_meters_v2 ( - id uuid primary key, - subscription_usage_id uuid not null references subscription_usages_v2 (id) on delete cascade, - model_id integer not null references models (id) on delete cascade, - mode text not null, - requests integer not null default 0 -); - -create unique index uix_subscription_usage_meters_v2_on_usage_model_mode on subscription_usage_meters_v2 (subscription_usage_id, model_id, mode); diff --git a/crates/collab/migrations_llm/20250504132836_drop_legacy_subscription_usage_and_meter_tables.sql b/crates/collab/migrations_llm/20250504132836_drop_legacy_subscription_usage_and_meter_tables.sql deleted file mode 100644 index f06b152d7ba9a38419facb18fd620899a63b083d..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250504132836_drop_legacy_subscription_usage_and_meter_tables.sql +++ /dev/null @@ -1,2 +0,0 @@ -drop table subscription_usage_meters; -drop table subscription_usages; diff --git a/crates/collab/migrations_llm/20250521211721_drop_monthly_and_lifetime_usages_tables.sql b/crates/collab/migrations_llm/20250521211721_drop_monthly_and_lifetime_usages_tables.sql deleted file mode 100644 index 5f03f50d0b3e17acf3aabd433df9ef317172039a..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250521211721_drop_monthly_and_lifetime_usages_tables.sql +++ /dev/null @@ -1,2 +0,0 @@ -drop table monthly_usages; -drop table lifetime_usages; diff --git a/crates/collab/migrations_llm/20250521222416_drop_billing_events_table.sql b/crates/collab/migrations_llm/20250521222416_drop_billing_events_table.sql deleted file mode 100644 index 36b79266b6adc448e99d6bb3fa1c88b9ee9604f5..0000000000000000000000000000000000000000 --- a/crates/collab/migrations_llm/20250521222416_drop_billing_events_table.sql +++ /dev/null @@ -1 +0,0 @@ -drop table billing_events; diff --git a/crates/collab/src/api/contributors.rs b/crates/collab/src/api/contributors.rs index 8cfef0ad7e717614e23c3cf9d04852c976f1f55f..ce318b15295ebe5c777597a6d3c6106e57af8e05 100644 --- a/crates/collab/src/api/contributors.rs +++ b/crates/collab/src/api/contributors.rs @@ -54,6 +54,26 @@ async fn check_is_contributor( ) -> Result> { let params = params.into_contributor_selector()?; + if CopilotSweAgentBot::is_copilot_bot(¶ms) { + return Ok(Json(CheckIsContributorResponse { + signed_at: Some( + CopilotSweAgentBot::created_at() + .and_utc() 
+ .to_rfc3339_opts(SecondsFormat::Millis, true), + ), + })); + } + + if Dependabot::is_dependabot(&params) { + return Ok(Json(CheckIsContributorResponse { + signed_at: Some( + Dependabot::created_at() + .and_utc() + .to_rfc3339_opts(SecondsFormat::Millis, true), + ), + })); + } + if RenovateBot::is_renovate_bot(&params) { return Ok(Json(CheckIsContributorResponse { signed_at: Some( @@ -64,6 +84,16 @@ async fn check_is_contributor( })); } + if ZedZippyBot::is_zed_zippy_bot(&params) { + return Ok(Json(CheckIsContributorResponse { + signed_at: Some( + ZedZippyBot::created_at() + .and_utc() + .to_rfc3339_opts(SecondsFormat::Millis, true), + ), + })); + } + + Ok(Json(CheckIsContributorResponse { signed_at: app .db @@ -73,6 +103,71 @@ async fn check_is_contributor( })) } +/// The Copilot bot GitHub user (`copilot-swe-agent[bot]`). +/// +/// https://api.github.com/users/copilot-swe-agent[bot] +struct CopilotSweAgentBot; + +impl CopilotSweAgentBot { + const LOGIN: &'static str = "copilot-swe-agent[bot]"; + const USER_ID: i32 = 198982749; + /// The alias of the GitHub copilot user. Although https://api.github.com/users/copilot + /// yields a 404, GitHub still refers to the copilot bot user as @Copilot in some cases. + const NAME_ALIAS: &'static str = "copilot"; + + /// Returns the `created_at` timestamp for the Copilot bot user. + fn created_at() -> &'static NaiveDateTime { + static CREATED_AT: OnceLock<NaiveDateTime> = OnceLock::new(); + CREATED_AT.get_or_init(|| { + chrono::DateTime::parse_from_rfc3339("2025-02-12T20:26:08Z") + .expect("failed to parse 'created_at' for 'copilot-swe-agent[bot]'") + .naive_utc() + }) + } + + /// Returns whether the given contributor selector corresponds to the Copilot bot user. + fn is_copilot_bot(contributor: &ContributorSelector) -> bool { + match contributor { + ContributorSelector::GitHubLogin { github_login } => { + github_login == Self::LOGIN || github_login == Self::NAME_ALIAS + } + ContributorSelector::GitHubUserId { github_user_id } => { + github_user_id == &Self::USER_ID + } + } + } +} + +/// The Dependabot bot GitHub user (`dependabot[bot]`). +/// +/// https://api.github.com/users/dependabot[bot] +struct Dependabot; + +impl Dependabot { + const LOGIN: &'static str = "dependabot[bot]"; + const USER_ID: i32 = 49699333; + + /// Returns the `created_at` timestamp for the Dependabot bot user. + fn created_at() -> &'static NaiveDateTime { + static CREATED_AT: OnceLock<NaiveDateTime> = OnceLock::new(); + CREATED_AT.get_or_init(|| { + chrono::DateTime::parse_from_rfc3339("2019-04-16T22:34:25Z") + .expect("failed to parse 'created_at' for 'dependabot[bot]'") + .naive_utc() + }) + } + + /// Returns whether the given contributor selector corresponds to the Dependabot bot user. + fn is_dependabot(contributor: &ContributorSelector) -> bool { + match contributor { + ContributorSelector::GitHubLogin { github_login } => github_login == Self::LOGIN, + ContributorSelector::GitHubUserId { github_user_id } => { + github_user_id == &Self::USER_ID + } + } + } +} + /// The Renovate bot GitHub user (`renovate[bot]`). /// /// https://api.github.com/users/renovate[bot] @@ -103,6 +198,36 @@ impl RenovateBot { } } +/// The Zed Zippy bot GitHub user (`zed-zippy[bot]`). +/// +/// https://api.github.com/users/zed-zippy[bot] +struct ZedZippyBot; + +impl ZedZippyBot { + const LOGIN: &'static str = "zed-zippy[bot]"; + const USER_ID: i32 = 234243425; + + /// Returns the `created_at` timestamp for the Zed Zippy bot user.
+ fn created_at() -> &'static NaiveDateTime { + static CREATED_AT: OnceLock = OnceLock::new(); + CREATED_AT.get_or_init(|| { + chrono::DateTime::parse_from_rfc3339("2025-09-24T17:00:11Z") + .expect("failed to parse 'created_at' for 'zed-zippy[bot]'") + .naive_utc() + }) + } + + /// Returns whether the given contributor selector corresponds to the Zed Zippy bot user. + fn is_zed_zippy_bot(contributor: &ContributorSelector) -> bool { + match contributor { + ContributorSelector::GitHubLogin { github_login } => github_login == Self::LOGIN, + ContributorSelector::GitHubUserId { github_user_id } => { + github_user_id == &Self::USER_ID + } + } + } +} + #[derive(Debug, Deserialize)] struct AddContributorBody { github_user_id: i32, diff --git a/crates/collab/src/api/extensions.rs b/crates/collab/src/api/extensions.rs index 1ace433db298be7ffd159128b54b194395ba4fe5..187b2ab279e1b99b257a61cd2841617ecefb7b3c 100644 --- a/crates/collab/src/api/extensions.rs +++ b/crates/collab/src/api/extensions.rs @@ -11,7 +11,7 @@ use axum::{ }; use collections::{BTreeSet, HashMap}; use rpc::{ExtensionApiManifest, ExtensionProvides, GetExtensionsResponse}; -use semantic_version::SemanticVersion; +use semver::Version as SemanticVersion; use serde::Deserialize; use std::str::FromStr; use std::{sync::Arc, time::Duration}; @@ -108,8 +108,8 @@ struct GetExtensionUpdatesParams { ids: String, min_schema_version: i32, max_schema_version: i32, - min_wasm_api_version: SemanticVersion, - max_wasm_api_version: SemanticVersion, + min_wasm_api_version: semver::Version, + max_wasm_api_version: semver::Version, } async fn get_extension_updates( diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 1152cb97d79ef2c7df437479d79b28a5ca6d2ef7..a3eceb472cbb6c7d9686b520f0a5f3f13fa4dd90 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -22,7 +22,7 @@ use sea_orm::{ entity::prelude::*, sea_query::{Alias, Expr, OnConflict}, }; -use semantic_version::SemanticVersion; +use semver::Version; use serde::{Deserialize, Serialize}; use std::ops::RangeInclusive; use std::{ @@ -671,7 +671,7 @@ pub struct NewExtensionVersion { pub struct ExtensionVersionConstraints { pub schema_versions: RangeInclusive, - pub wasm_api_versions: RangeInclusive, + pub wasm_api_versions: RangeInclusive, } impl LocalSettingsKind { diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 7b457a5da438e0a9ab7c6cd79368b2845e962318..db91021c22b35b7b7159bd5cd54e28f8fa1a14e2 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -5,7 +5,6 @@ pub mod buffers; pub mod channels; pub mod contacts; pub mod contributors; -pub mod embeddings; pub mod extensions; pub mod notifications; pub mod projects; diff --git a/crates/collab/src/db/queries/embeddings.rs b/crates/collab/src/db/queries/embeddings.rs deleted file mode 100644 index 6ae8013284f4652d5cb0d4a19214c3a5c1a42df0..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/embeddings.rs +++ /dev/null @@ -1,94 +0,0 @@ -use super::*; -use time::Duration; -use time::OffsetDateTime; - -impl Database { - pub async fn get_embeddings( - &self, - model: &str, - digests: &[Vec], - ) -> Result, Vec>> { - self.transaction(|tx| async move { - let embeddings = { - let mut db_embeddings = embedding::Entity::find() - .filter( - embedding::Column::Model.eq(model).and( - embedding::Column::Digest - .is_in(digests.iter().map(|digest| digest.as_slice())), - ), - ) - .stream(&*tx) - .await?; - - let mut embeddings = HashMap::default(); - 
while let Some(db_embedding) = db_embeddings.next().await { - let db_embedding = db_embedding?; - embeddings.insert(db_embedding.digest, db_embedding.dimensions); - } - embeddings - }; - - if !embeddings.is_empty() { - let now = OffsetDateTime::now_utc(); - let retrieved_at = PrimitiveDateTime::new(now.date(), now.time()); - - embedding::Entity::update_many() - .filter( - embedding::Column::Digest - .is_in(embeddings.keys().map(|digest| digest.as_slice())), - ) - .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) - .exec(&*tx) - .await?; - } - - Ok(embeddings) - }) - .await - } - - pub async fn save_embeddings( - &self, - model: &str, - embeddings: &HashMap, Vec>, - ) -> Result<()> { - self.transaction(|tx| async move { - embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| { - let now_offset_datetime = OffsetDateTime::now_utc(); - let retrieved_at = - PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time()); - - embedding::ActiveModel { - model: ActiveValue::set(model.to_string()), - digest: ActiveValue::set(digest.clone()), - dimensions: ActiveValue::set(dimensions.clone()), - retrieved_at: ActiveValue::set(retrieved_at), - } - })) - .on_conflict( - OnConflict::columns([embedding::Column::Model, embedding::Column::Digest]) - .do_nothing() - .to_owned(), - ) - .exec_without_returning(&*tx) - .await?; - Ok(()) - }) - .await - } - - pub async fn purge_old_embeddings(&self) -> Result<()> { - self.transaction(|tx| async move { - embedding::Entity::delete_many() - .filter( - embedding::Column::RetrievedAt - .lte(OffsetDateTime::now_utc() - Duration::days(60)), - ) - .exec(&*tx) - .await?; - - Ok(()) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index b4dc4dd89d15fa1b80b561408f2bdc9a233094c0..729e3de99f16bd7351bb510ec24156a4fcae7c60 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -69,7 +69,7 @@ impl Database { extensions: &[extension::Model], constraints: Option<&ExtensionVersionConstraints>, tx: &DatabaseTransaction, - ) -> Result> { + ) -> Result> { let mut versions = extension_version::Entity::find() .filter( extension_version::Column::ExtensionId @@ -79,11 +79,10 @@ impl Database { .await?; let mut max_versions = - HashMap::::default(); + HashMap::::default(); while let Some(version) = versions.next().await { let version = version?; - let Some(extension_version) = SemanticVersion::from_str(&version.version).log_err() - else { + let Some(extension_version) = Version::from_str(&version.version).log_err() else { continue; }; @@ -102,7 +101,7 @@ impl Database { } if let Some(wasm_api_version) = version.wasm_api_version.as_ref() { - if let Some(version) = SemanticVersion::from_str(wasm_api_version).log_err() { + if let Some(version) = Version::from_str(wasm_api_version).log_err() { if !constraints.wasm_api_versions.contains(&version) { continue; } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 51a0ef83323ec70675283d2fdec7ca1ad791b12d..6f1d8b884d15041eadaa9073a5bd99e5ed352502 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -362,6 +362,8 @@ impl Database { entry_ids: ActiveValue::set("[]".into()), head_commit_details: ActiveValue::set(None), merge_message: ActiveValue::set(None), + remote_upstream_url: ActiveValue::set(None), + remote_origin_url: ActiveValue::set(None), } }), ) @@ -511,6 
+513,8 @@ impl Database { serde_json::to_string(&update.current_merge_conflicts).unwrap(), )), merge_message: ActiveValue::set(update.merge_message.clone()), + remote_upstream_url: ActiveValue::set(update.remote_upstream_url.clone()), + remote_origin_url: ActiveValue::set(update.remote_origin_url.clone()), }) .on_conflict( OnConflict::columns([ @@ -1005,6 +1009,8 @@ impl Database { is_last_update: true, merge_message: db_repository_entry.merge_message, stash_entries: Vec::new(), + remote_upstream_url: db_repository_entry.remote_upstream_url.clone(), + remote_origin_url: db_repository_entry.remote_origin_url.clone(), }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index f020b99b5f1030cfe9391498512258e6db249bac..eafb5cac44a510bf4ced0434a9b4adfadff0ebbc 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -796,6 +796,8 @@ impl Database { is_last_update: true, merge_message: db_repository.merge_message, stash_entries: Vec::new(), + remote_upstream_url: db_repository.remote_upstream_url.clone(), + remote_origin_url: db_repository.remote_origin_url.clone(), }); } } diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index e619acaaf2bc237caac67dedcb5c738114d260d5..c179539e4bbc07fe070f3089f912b4c0b4fbf167 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -8,7 +8,6 @@ pub mod channel_chat_participant; pub mod channel_member; pub mod contact; pub mod contributor; -pub mod embedding; pub mod extension; pub mod extension_version; pub mod follower; @@ -23,7 +22,6 @@ pub mod project_repository_statuses; pub mod room; pub mod room_participant; pub mod server; -pub mod signup; pub mod user; pub mod worktree; pub mod worktree_diagnostic_summary; diff --git a/crates/collab/src/db/tables/embedding.rs b/crates/collab/src/db/tables/embedding.rs deleted file mode 100644 index 8743b4b9e65751bf350bff1db532de38ce73f368..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/embedding.rs +++ /dev/null @@ -1,18 +0,0 @@ -use sea_orm::entity::prelude::*; -use time::PrimitiveDateTime; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "embeddings")] -pub struct Model { - #[sea_orm(primary_key)] - pub model: String, - #[sea_orm(primary_key)] - pub digest: Vec, - pub dimensions: Vec, - pub retrieved_at: PrimitiveDateTime, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/project_repository.rs b/crates/collab/src/db/tables/project_repository.rs index eb653ecee37d48ce79e26450eb85d87dec411c1e..190ae8d79c54bb78daef4a1568ec75683eb0b0f2 100644 --- a/crates/collab/src/db/tables/project_repository.rs +++ b/crates/collab/src/db/tables/project_repository.rs @@ -22,6 +22,8 @@ pub struct Model { pub branch_summary: Option, // A JSON object representing the current Head commit values pub head_commit_details: Option, + pub remote_upstream_url: Option, + pub remote_origin_url: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/db/tables/signup.rs b/crates/collab/src/db/tables/signup.rs deleted file mode 100644 index 79d9f0580c13f981daf30283b1a9e8902ea0b995..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/signup.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::db::{SignupId, UserId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, 
PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "signups")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: SignupId, - pub email_address: String, - pub email_confirmation_code: String, - pub email_confirmation_sent: bool, - pub created_at: DateTime, - pub device_id: Option, - pub user_id: Option, - pub inviting_user_id: Option, - pub platform_mac: bool, - pub platform_linux: bool, - pub platform_windows: bool, - pub platform_unknown: bool, - pub editor_features: Option>, - pub programming_languages: Option>, - pub added_to_mailing_list: bool, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/user.rs b/crates/collab/src/db/tables/user.rs index 8e8c03fafc92127f8754f473e04dfab39592ea14..3f753954ebb1cff78318de8d9be4786a4d5d0efb 100644 --- a/crates/collab/src/db/tables/user.rs +++ b/crates/collab/src/db/tables/user.rs @@ -39,25 +39,6 @@ pub enum Relation { Contributor, } -impl Model { - /// Returns the timestamp of when the user's account was created. - /// - /// This will be the earlier of the `created_at` and `github_user_created_at` timestamps. - pub fn account_created_at(&self) -> NaiveDateTime { - let mut account_created_at = self.created_at; - if let Some(github_created_at) = self.github_user_created_at { - account_created_at = account_created_at.min(github_created_at); - } - - account_created_at - } - - /// Returns the age of the user's account. - pub fn account_age(&self) -> chrono::Duration { - chrono::Utc::now().naive_utc() - self.account_created_at() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::AccessToken.def() diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 67c36576aac0f938bbc040202d7fa83e35af2d3b..10a32691ed36b4db9502c63bd510df6ffe1fe5b6 100644 --- a/crates/collab/src/db/tests.rs +++ b/crates/collab/src/db/tests.rs @@ -2,26 +2,22 @@ mod buffer_tests; mod channel_tests; mod contributor_tests; mod db_tests; -// we only run postgres tests on macos right now -#[cfg(target_os = "macos")] -mod embedding_tests; mod extension_tests; +mod migrations; -use crate::migrations::run_database_migrations; +use std::sync::Arc; +use std::sync::atomic::{AtomicI32, Ordering::SeqCst}; +use std::time::Duration; -use super::*; use gpui::BackgroundExecutor; use parking_lot::Mutex; use rand::prelude::*; use sea_orm::ConnectionTrait; use sqlx::migrate::MigrateDatabase; -use std::{ - sync::{ - Arc, - atomic::{AtomicI32, Ordering::SeqCst}, - }, - time::Duration, -}; + +use self::migrations::run_database_migrations; + +use super::*; pub struct TestDb { pub db: Option>, diff --git a/crates/collab/src/db/tests/embedding_tests.rs b/crates/collab/src/db/tests/embedding_tests.rs deleted file mode 100644 index 5d8d69c0304d3a16b55e9d7b1477fe62cc22024a..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tests/embedding_tests.rs +++ /dev/null @@ -1,87 +0,0 @@ -use super::TestDb; -use crate::db::embedding; -use collections::HashMap; -use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, sea_query::Expr}; -use std::ops::Sub; -use time::{Duration, OffsetDateTime, PrimitiveDateTime}; - -// SQLite does not support array arguments, so we only test this against a real postgres instance -#[gpui::test] -async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor()); - let db = test_db.db(); - - let provider = "test_model"; - let digest1 = vec![1, 2, 3]; - let 
digest2 = vec![4, 5, 6]; - let embeddings = HashMap::from_iter([ - (digest1.clone(), vec![0.1, 0.2, 0.3]), - (digest2.clone(), vec![0.4, 0.5, 0.6]), - ]); - - // Save embeddings - db.save_embeddings(provider, &embeddings).await.unwrap(); - - // Retrieve embeddings - let retrieved_embeddings = db - .get_embeddings(provider, &[digest1.clone(), digest2.clone()]) - .await - .unwrap(); - assert_eq!(retrieved_embeddings.len(), 2); - assert!(retrieved_embeddings.contains_key(&digest1)); - assert!(retrieved_embeddings.contains_key(&digest2)); - - // Check if the retrieved embeddings are correct - assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]); - assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]); -} - -#[gpui::test] -async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor()); - let db = test_db.db(); - - let model = "test_model"; - let digest = vec![7, 8, 9]; - let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]); - - // Save old embeddings - db.save_embeddings(model, &embeddings).await.unwrap(); - - // Reach into the DB and change the retrieved at to be > 60 days - db.transaction(|tx| { - let digest = digest.clone(); - async move { - let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61)); - let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time()); - - embedding::Entity::update_many() - .filter( - embedding::Column::Model - .eq(model) - .and(embedding::Column::Digest.eq(digest)), - ) - .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) - .exec(&*tx) - .await - .unwrap(); - - Ok(()) - } - }) - .await - .unwrap(); - - // Purge old embeddings - db.purge_old_embeddings().await.unwrap(); - - // Try to retrieve the purged embeddings - let retrieved_embeddings = db - .get_embeddings(model, std::slice::from_ref(&digest)) - .await - .unwrap(); - assert!( - retrieved_embeddings.is_empty(), - "Old embeddings should have been purged" - ); -} diff --git a/crates/collab/src/migrations.rs b/crates/collab/src/db/tests/migrations.rs similarity index 100% rename from crates/collab/src/migrations.rs rename to crates/collab/src/db/tests/migrations.rs diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 14573e94b0b535b1644510e28dfc906b1a2c420e..08f7e61c020ca9ea23be62636e381f9abedf7cf0 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -3,8 +3,6 @@ pub mod auth; pub mod db; pub mod env; pub mod executor; -pub mod llm; -pub mod migrations; pub mod rpc; pub mod seed; diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs deleted file mode 100644 index dec10232bdb000acef9def25cad519ceb213956b..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod db; diff --git a/crates/collab/src/llm/db.rs b/crates/collab/src/llm/db.rs deleted file mode 100644 index b15d5a42b5f183831b34552beba3f616d3a7c3f0..0000000000000000000000000000000000000000 --- a/crates/collab/src/llm/db.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::future::Future; -use std::sync::Arc; - -use anyhow::Context; -pub use sea_orm::ConnectOptions; -use sea_orm::{DatabaseConnection, DatabaseTransaction, IsolationLevel, TransactionTrait}; - -use crate::Result; -use crate::db::TransactionHandle; -use crate::executor::Executor; - -/// The database for the LLM service. 
-pub struct LlmDatabase { - options: ConnectOptions, - pool: DatabaseConnection, - #[allow(unused)] - executor: Executor, - #[cfg(test)] - runtime: Option, -} - -impl LlmDatabase { - /// Connects to the database with the given options - pub async fn new(options: ConnectOptions, executor: Executor) -> Result { - sqlx::any::install_default_drivers(); - Ok(Self { - options: options.clone(), - pool: sea_orm::Database::connect(options).await?, - executor, - #[cfg(test)] - runtime: None, - }) - } - - pub fn options(&self) -> &ConnectOptions { - &self.options - } - - pub async fn transaction(&self, f: F) -> Result - where - F: Send + Fn(TransactionHandle) -> Fut, - Fut: Send + Future>, - { - let body = async { - let (tx, result) = self.with_transaction(&f).await?; - match result { - Ok(result) => match tx.commit().await.map_err(Into::into) { - Ok(()) => Ok(result), - Err(error) => Err(error), - }, - Err(error) => { - tx.rollback().await?; - Err(error) - } - } - }; - - self.run(body).await - } - - async fn with_transaction(&self, f: &F) -> Result<(DatabaseTransaction, Result)> - where - F: Send + Fn(TransactionHandle) -> Fut, - Fut: Send + Future>, - { - let tx = self - .pool - .begin_with_config(Some(IsolationLevel::ReadCommitted), None) - .await?; - - let mut tx = Arc::new(Some(tx)); - let result = f(TransactionHandle(tx.clone())).await; - let tx = Arc::get_mut(&mut tx) - .and_then(|tx| tx.take()) - .context("couldn't complete transaction because it's still in use")?; - - Ok((tx, result)) - } - - async fn run(&self, future: F) -> Result - where - F: Future>, - { - #[cfg(test)] - { - if let Executor::Deterministic(executor) = &self.executor { - executor.simulate_random_delay().await; - } - - self.runtime.as_ref().unwrap().block_on(future) - } - - #[cfg(not(test))] - { - future.await - } - } -} diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 6b94459910647c1e48ee69f2b0dd38afd3723821..030158c94d640ef8a9024a8b783685bac7d0dcdb 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -1,4 +1,4 @@ -use anyhow::{Context as _, anyhow}; +use anyhow::anyhow; use axum::headers::HeaderMapExt; use axum::{ Extension, Router, @@ -9,17 +9,14 @@ use axum::{ use collab::ServiceMode; use collab::api::CloudflareIpCountryHeader; -use collab::llm::db::LlmDatabase; -use collab::migrations::run_database_migrations; use collab::{ AppState, Config, Result, api::fetch_extensions_from_blob_store_periodically, db, env, - executor::Executor, rpc::ResultExt, + executor::Executor, }; use db::Database; use std::{ env::args, net::{SocketAddr, TcpListener}, - path::Path, sync::Arc, time::Duration, }; @@ -49,10 +46,6 @@ async fn main() -> Result<()> { Some("version") => { println!("collab v{} ({})", VERSION, REVISION.unwrap_or("unknown")); } - Some("migrate") => { - let config = envy::from_env::().expect("error loading config"); - setup_app_database(&config).await?; - } Some("seed") => { let config = envy::from_env::().expect("error loading config"); let db_options = db::ConnectOptions::new(config.database_url.clone()); @@ -69,7 +62,7 @@ async fn main() -> Result<()> { Some("all") => ServiceMode::All, _ => { return Err(anyhow!( - "usage: collab >" + "usage: collab >" ))?; } }; @@ -90,13 +83,10 @@ async fn main() -> Result<()> { if mode.is_collab() || mode.is_api() { setup_app_database(&config).await?; - setup_llm_database(&config).await?; let state = AppState::new(config, Executor::Production).await?; if mode.is_collab() { - state.db.purge_old_embeddings().await.trace_err(); - let epoch = 
state .db .create_server(&state.config.zed_environment) @@ -213,25 +203,6 @@ async fn setup_app_database(config: &Config) -> Result<()> { let db_options = db::ConnectOptions::new(config.database_url.clone()); let mut db = Database::new(db_options).await?; - let migrations_path = config.migrations_path.as_deref().unwrap_or_else(|| { - #[cfg(feature = "sqlite")] - let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations.sqlite"); - #[cfg(not(feature = "sqlite"))] - let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations"); - - Path::new(default_migrations) - }); - - let migrations = run_database_migrations(db.options(), migrations_path).await?; - for (migration, duration) in migrations { - log::info!( - "Migrated {} {} {:?}", - migration.version, - migration.description, - duration - ); - } - db.initialize_notification_kinds().await?; if config.seed_path.is_some() { @@ -241,37 +212,6 @@ async fn setup_app_database(config: &Config) -> Result<()> { Ok(()) } -async fn setup_llm_database(config: &Config) -> Result<()> { - let database_url = config - .llm_database_url - .as_ref() - .context("missing LLM_DATABASE_URL")?; - - let db_options = db::ConnectOptions::new(database_url.clone()); - let db = LlmDatabase::new(db_options, Executor::Production).await?; - - let migrations_path = config - .llm_database_migrations_path - .as_deref() - .unwrap_or_else(|| { - let default_migrations = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations_llm"); - - Path::new(default_migrations) - }); - - let migrations = run_database_migrations(db.options(), migrations_path).await?; - for (migration, duration) in migrations { - log::info!( - "Migrated {} {} {:?}", - migration.version, - migration.description, - duration - ); - } - - Ok(()) -} - async fn handle_root(Extension(mode): Extension) -> String { format!("zed:{mode} v{VERSION} ({})", REVISION.unwrap_or("unknown")) } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index f73631bb19c80a463ed38b78031dd0fe4d452681..9511087af8887a3c799357d06050ce48431b38a6 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -50,7 +50,7 @@ use rpc::{ RequestMessage, ShareProject, UpdateChannelBufferCollaborators, }, }; -use semantic_version::SemanticVersion; +use semver::Version; use serde::{Serialize, Serializer}; use std::{ any::TypeId, @@ -453,6 +453,7 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) @@ -468,6 +469,8 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(update_context) @@ -984,14 +987,14 @@ impl Server { { let mut pool = self.connection_pool.lock(); - pool.add_connection(connection_id, user.id, user.admin, zed_version); + pool.add_connection(connection_id, user.id, user.admin, zed_version.clone()); self.peer.send( connection_id, 
build_initial_contacts_update(contacts, &pool), )?; } - if should_auto_subscribe_to_channels(zed_version) { + if should_auto_subscribe_to_channels(&zed_version) { subscribe_user_to_channels(user.id, session).await?; } @@ -1135,7 +1138,7 @@ impl Header for ProtocolVersion { } } -pub struct AppVersionHeader(SemanticVersion); +pub struct AppVersionHeader(Version); impl Header for AppVersionHeader { fn name() -> &'static HeaderName { static ZED_APP_VERSION: OnceLock = OnceLock::new(); @@ -2833,8 +2836,8 @@ async fn remove_contact( Ok(()) } -fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool { - version.0.minor() < 139 +fn should_auto_subscribe_to_channels(version: &ZedVersion) -> bool { + version.0.minor < 139 } async fn subscribe_to_channels( diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index 417edd66d66d7479f42fb09b01c7a5d9f05a6223..b1193239163fe34a0cb5802aa398abc37d1cca42 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -2,7 +2,7 @@ use crate::db::{ChannelId, ChannelRole, UserId}; use anyhow::{Context as _, Result}; use collections::{BTreeMap, HashMap, HashSet}; use rpc::ConnectionId; -use semantic_version::SemanticVersion; +use semver::Version; use serde::Serialize; use std::fmt; use tracing::instrument; @@ -19,8 +19,8 @@ struct ConnectedPrincipal { connection_ids: HashSet, } -#[derive(Copy, Clone, Debug, Serialize, PartialOrd, PartialEq, Eq, Ord)] -pub struct ZedVersion(pub SemanticVersion); +#[derive(Clone, Debug, Serialize, PartialOrd, PartialEq, Eq, Ord)] +pub struct ZedVersion(pub Version); impl fmt::Display for ZedVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -32,13 +32,13 @@ impl ZedVersion { pub fn can_collaborate(&self) -> bool { // v0.204.1 was the first version after the auto-update bug. // We reject any clients older than that to hope we can persuade them to upgrade. - if self.0 < SemanticVersion::new(0, 204, 1) { + if self.0 < Version::new(0, 204, 1) { return false; } // Since we hotfixed the changes to no longer connect to Collab automatically to Preview, we also need to reject // versions in the range [v0.199.0, v0.199.1]. 
- if self.0 >= SemanticVersion::new(0, 199, 0) && self.0 < SemanticVersion::new(0, 199, 2) { + if self.0 >= Version::new(0, 199, 0) && self.0 < Version::new(0, 199, 2) { return false; } diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs index 7d07360b8042ed54a9f19a82a2876e448e8a14a4..3785ee0b7abaeddeac5c9acb1718407ab5bd54f2 100644 --- a/crates/collab/src/tests.rs +++ b/crates/collab/src/tests.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use call::Room; use client::ChannelId; use gpui::{Entity, TestAppContext}; @@ -18,7 +16,6 @@ mod randomized_test_helpers; mod remote_editing_collaboration_tests; mod test_server; -use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; pub use randomized_test_helpers::{ RandomizedTest, TestError, UserTestPlan, run_randomized_test, save_randomized_test_plan, }; @@ -51,17 +48,3 @@ fn room_participants(room: &Entity, cx: &mut TestAppContext) -> RoomPartic fn channel_id(room: &Entity, cx: &mut TestAppContext) -> Option { cx.read(|cx| room.read(cx).channel_id()) } - -fn rust_lang() -> Arc { - Arc::new(Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - )) -} diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 8e857f4f02505998f226b543b843e68222016aeb..62c61d3cf0b22e7adad5ada7ec46598fbadf673c 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -7,7 +7,7 @@ use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL; use client::{Collaborator, ParticipantIndex, UserId}; use collab_ui::channel_view::ChannelView; use collections::HashMap; -use editor::{Anchor, Editor, ToOffset}; +use editor::{Anchor, Editor, MultiBufferOffset, ToOffset}; use futures::future; use gpui::{BackgroundExecutor, Context, Entity, TestAppContext, Window}; use rpc::{RECEIVE_TIMEOUT, proto::PeerId}; @@ -180,7 +180,7 @@ async fn test_channel_notes_participant_indices( notes.editor.update(cx, |editor, cx| { editor.insert("a", window, cx); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges(vec![0..1]); + selections.select_ranges(vec![MultiBufferOffset(0)..MultiBufferOffset(1)]); }); }); }); @@ -190,7 +190,7 @@ async fn test_channel_notes_participant_indices( editor.move_down(&Default::default(), window, cx); editor.insert("b", window, cx); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges(vec![1..2]); + selections.select_ranges(vec![MultiBufferOffset(1)..MultiBufferOffset(2)]); }); }); }); @@ -200,7 +200,7 @@ async fn test_channel_notes_participant_indices( editor.move_down(&Default::default(), window, cx); editor.insert("c", window, cx); editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges(vec![2..3]); + selections.select_ranges(vec![MultiBufferOffset(2)..MultiBufferOffset(3)]); }); }); }); @@ -287,12 +287,12 @@ async fn test_channel_notes_participant_indices( editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(Default::default(), window, cx, |selections| { - selections.select_ranges(vec![0..1]); + selections.select_ranges(vec![MultiBufferOffset(0)..MultiBufferOffset(1)]); }); }); editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(Default::default(), window, cx, |selections| { - 
selections.select_ranges(vec![2..3]); + selections.select_ranges(vec![MultiBufferOffset(2)..MultiBufferOffset(3)]); }); }); executor.run_until_parked(); @@ -327,7 +327,7 @@ fn assert_remote_selections( let end = s.selection.end.to_offset(snapshot.buffer_snapshot()); let user_id = collaborators.get(&peer_id).unwrap().user_id; let participant_index = hub.user_participant_indices(cx).get(&user_id).copied(); - (participant_index, start..end) + (participant_index, start.0..end.0) }) .collect::>(); assert_eq!( diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index bdc024aaca7242ab0fe261e3b673bf4d0efe23b1..4e6cdb0e79aba494bd01137cc262a097a084217e 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1,13 +1,12 @@ -use crate::{ - rpc::RECONNECT_TIMEOUT, - tests::{TestServer, rust_lang}, -}; +use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; use call::ActiveCall; use editor::{ - DocumentColorsRenderMode, Editor, FETCH_COLORS_DEBOUNCE_TIMEOUT, RowInfo, SelectionEffects, + DocumentColorsRenderMode, Editor, FETCH_COLORS_DEBOUNCE_TIMEOUT, MultiBufferOffset, RowInfo, + SelectionEffects, actions::{ - ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, - ExpandMacroRecursively, MoveToEnd, Redo, Rename, SelectAll, ToggleCodeActions, Undo, + ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, CopyFileLocation, + CopyFileName, CopyFileNameWithoutExtension, ExpandMacroRecursively, MoveToEnd, Redo, + Rename, SelectAll, ToggleCodeActions, Undo, }, test::{ editor_test_context::{AssertionContextManager, EditorTestContext}, @@ -21,8 +20,9 @@ use gpui::{ App, Rgba, SharedString, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext, }; use indoc::indoc; -use language::FakeLspAdapter; +use language::{FakeLspAdapter, rust_lang}; use lsp::LSP_REQUEST_TIMEOUT; +use pretty_assertions::assert_eq; use project::{ ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT, lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro}, @@ -288,7 +288,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( "}); } -#[gpui::test(iterations = 10)] +#[gpui::test] async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; let client_a = server.create_client(cx_a, "user_a").await; @@ -307,17 +307,83 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu ..lsp::ServerCapabilities::default() }; client_a.language_registry().add(rust_lang()); - let mut fake_language_servers = client_a.language_registry().register_fake_lsp( + let mut fake_language_servers = [ + client_a.language_registry().register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: capabilities.clone(), + initializer: Some(Box::new(|fake_server| { + fake_server.set_request_handler::( + |params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 14), + ); + + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "first_method(…)".into(), + detail: Some("fn(&mut self, B) -> C".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "first_method($1)".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), 
+ insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }, + lsp::CompletionItem { + label: "second_method(…)".into(), + detail: Some("fn(&mut self, C) -> D".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "second_method()".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }, + ]))) + }, + ); + })), + ..FakeLspAdapter::default() + }, + ), + client_a.language_registry().register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "fake-analyzer", + capabilities: capabilities.clone(), + initializer: Some(Box::new(|fake_server| { + fake_server.set_request_handler::( + |_, _| async move { Ok(None) }, + ); + })), + ..FakeLspAdapter::default() + }, + ), + ]; + client_b.language_registry().add(rust_lang()); + client_b.language_registry().register_fake_lsp_adapter( "Rust", FakeLspAdapter { capabilities: capabilities.clone(), ..FakeLspAdapter::default() }, ); - client_b.language_registry().add(rust_lang()); client_b.language_registry().register_fake_lsp_adapter( "Rust", FakeLspAdapter { + name: "fake-analyzer", capabilities, ..FakeLspAdapter::default() }, @@ -352,8 +418,10 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu Editor::for_buffer(buffer_b.clone(), Some(project_b.clone()), window, cx) }); - let fake_language_server = fake_language_servers.next().await.unwrap(); + let fake_language_server = fake_language_servers[0].next().await.unwrap(); + let second_fake_language_server = fake_language_servers[1].next().await.unwrap(); cx_a.background_executor.run_until_parked(); + cx_b.background_executor.run_until_parked(); buffer_b.read_with(cx_b, |buffer, _| { assert!(!buffer.completion_triggers().is_empty()) @@ -362,59 +430,15 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu // Type a completion trigger character as the guest. editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input(".", window, cx); }); cx_b.focus(&editor_b); - // Receive a completion request as the host's language server. - // Return some completions from the host's language server. 
- cx_a.executor().start_waiting(); - fake_language_server - .set_request_handler::(|params, _| async move { - assert_eq!( - params.text_document_position.text_document.uri, - lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), - ); - assert_eq!( - params.text_document_position.position, - lsp::Position::new(0, 14), - ); - - Ok(Some(lsp::CompletionResponse::Array(vec![ - lsp::CompletionItem { - label: "first_method(…)".into(), - detail: Some("fn(&mut self, B) -> C".into()), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - new_text: "first_method($1)".to_string(), - range: lsp::Range::new( - lsp::Position::new(0, 14), - lsp::Position::new(0, 14), - ), - })), - insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), - ..Default::default() - }, - lsp::CompletionItem { - label: "second_method(…)".into(), - detail: Some("fn(&mut self, C) -> D".into()), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - new_text: "second_method()".to_string(), - range: lsp::Range::new( - lsp::Position::new(0, 14), - lsp::Position::new(0, 14), - ), - })), - insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), - ..Default::default() - }, - ]))) - }) - .next() - .await - .unwrap(); - cx_a.executor().finish_waiting(); + // Allow the completion request to propagate from guest to host to LSP. + cx_b.background_executor.run_until_parked(); + cx_a.background_executor.run_until_parked(); // Open the buffer on the host. let buffer_a = project_a @@ -460,6 +484,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu // The additional edit is applied. cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); buffer_a.read_with(cx_a, |buffer, _| { assert_eq!( @@ -479,7 +504,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu // resolved editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([46..46]) + s.select_ranges([MultiBufferOffset(46)..MultiBufferOffset(46)]) }); editor.handle_input("; a", window, cx); editor.handle_input(".", window, cx); @@ -522,6 +547,10 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu ]))) }); + // Second language server also needs to handle the request (returns None) + let mut second_completion_response = second_fake_language_server + .set_request_handler::(|_, _| async move { Ok(None) }); + // The completion now gets a new `text_edit.new_text` when resolving the completion item let mut resolve_completion_response = fake_language_server .set_request_handler::(|params, _| async move { @@ -545,6 +574,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu cx_b.executor().run_until_parked(); completion_response.next().await.unwrap(); + second_completion_response.next().await.unwrap(); editor_b.update_in(cx_b, |editor, window, cx| { assert!(editor.context_menu_visible()); @@ -563,6 +593,75 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu "use d::SomeTrait;\nfn main() { a.first_method(); a.third_method(, , ) }" ); }); + + // Ensure buffer is synced before proceeding with the next test + cx_a.executor().run_until_parked(); + cx_b.executor().run_until_parked(); + + // Test completions from the second fake language server + // Add another completion trigger to test the second language server + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, 
cx, |s| { + s.select_ranges([MultiBufferOffset(68)..MultiBufferOffset(68)]) + }); + editor.handle_input("; b", window, cx); + editor.handle_input(".", window, cx); + }); + + buffer_b.read_with(cx_b, |buffer, _| { + assert_eq!( + buffer.text(), + "use d::SomeTrait;\nfn main() { a.first_method(); a.third_method(, , ); b. }" + ); + }); + + // Set up completion handlers for both language servers + let mut first_lsp_completion = fake_language_server + .set_request_handler::(|_, _| async move { Ok(None) }); + + let mut second_lsp_completion = second_fake_language_server + .set_request_handler::(|params, _| async move { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(1, 54), + ); + + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "analyzer_method(…)".into(), + detail: Some("fn(&self) -> Result".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "analyzer_method()".to_string(), + range: lsp::Range::new( + lsp::Position::new(1, 54), + lsp::Position::new(1, 54), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..lsp::CompletionItem::default() + }, + ]))) + }); + + // Await both language server responses + first_lsp_completion.next().await.unwrap(); + second_lsp_completion.next().await.unwrap(); + + cx_b.executor().run_until_parked(); + + // Confirm the completion from the second language server works + editor_b.update_in(cx_b, |editor, window, cx| { + assert!(editor.context_menu_visible()); + editor.confirm_completion(&ConfirmCompletion { item_ix: Some(0) }, window, cx); + assert_eq!( + editor.text(cx), + "use d::SomeTrait;\nfn main() { a.first_method(); a.third_method(, , ); b.analyzer_method() }" + ); + }); } #[gpui::test(iterations = 10)] @@ -850,7 +949,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T // Move cursor to a location that can be renamed. let prepare_rename = editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([7..7]) + s.select_ranges([MultiBufferOffset(7)..MultiBufferOffset(7)]) }); editor.rename(&Rename, window, cx).unwrap() }); @@ -877,17 +976,17 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T let buffer = editor.buffer().read(cx).snapshot(cx); assert_eq!( rename.range.start.to_offset(&buffer)..rename.range.end.to_offset(&buffer), - 6..9 + MultiBufferOffset(6)..MultiBufferOffset(9) ); rename.editor.update(cx, |rename_editor, cx| { - let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx)); + let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx)); assert_eq!( rename_selection.range(), - 0..3, + MultiBufferOffset(0)..MultiBufferOffset(3), "Rename that was triggered from zero selection caret, should propose the whole word." 
); rename_editor.buffer().update(cx, |rename_buffer, cx| { - rename_buffer.edit([(0..3, "THREE")], None, cx); + rename_buffer.edit([(MultiBufferOffset(0)..MultiBufferOffset(3), "THREE")], None, cx); }); }); }); @@ -898,7 +997,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T }); let prepare_rename = editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([7..8]) + s.select_ranges([MultiBufferOffset(7)..MultiBufferOffset(8)]) }); editor.rename(&Rename, window, cx).unwrap() }); @@ -925,16 +1024,16 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T let buffer = editor.buffer().read(cx).snapshot(cx); let lsp_rename_start = rename.range.start.to_offset(&buffer); let lsp_rename_end = rename.range.end.to_offset(&buffer); - assert_eq!(lsp_rename_start..lsp_rename_end, 6..9); + assert_eq!(lsp_rename_start..lsp_rename_end, MultiBufferOffset(6)..MultiBufferOffset(9)); rename.editor.update(cx, |rename_editor, cx| { - let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx)); + let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx)); assert_eq!( rename_selection.range(), - 1..2, + MultiBufferOffset(1)..MultiBufferOffset(2), "Rename that was triggered from a selection, should have the same selection range in the rename proposal" ); rename_editor.buffer().update(cx, |rename_buffer, cx| { - rename_buffer.edit([(0..lsp_rename_end - lsp_rename_start, "THREE")], None, cx); + rename_buffer.edit([(MultiBufferOffset(0)..MultiBufferOffset(lsp_rename_end - lsp_rename_start), "THREE")], None, cx); }); }); }); @@ -1137,7 +1236,7 @@ async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte // Move cursor to a location, this should trigger the code lens call. 
editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([7..7]) + s.select_ranges([MultiBufferOffset(7)..MultiBufferOffset(7)]) }); }); let () = request_started_rx.next().await.unwrap(); @@ -1159,7 +1258,7 @@ async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1]) + s.select_ranges([MultiBufferOffset(1)..MultiBufferOffset(1)]) }); }); let () = request_started_rx.next().await.unwrap(); @@ -1181,7 +1280,7 @@ async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([2..2]) + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(2)]) }); }); let () = request_started_rx.next().await.unwrap(); @@ -1479,7 +1578,10 @@ async fn test_share_project( buffer_a.read_with(cx_a, |buffer, _| { buffer .snapshot() - .selections_in_range(text::Anchor::MIN..text::Anchor::MAX, false) + .selections_in_range( + text::Anchor::min_max_range_for_buffer(buffer.remote_id()), + false, + ) .count() == 1 }); @@ -1520,7 +1622,10 @@ async fn test_share_project( buffer_a.read_with(cx_a, |buffer, _| { buffer .snapshot() - .selections_in_range(text::Anchor::MIN..text::Anchor::MAX, false) + .selections_in_range( + text::Anchor::min_max_range_for_buffer(buffer.remote_id()), + false, + ) .count() == 0 }); @@ -1619,7 +1724,7 @@ async fn test_on_input_format_from_host_to_guest( cx_a.focus(&editor_a); editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input(">", window, cx); }); @@ -1728,7 +1833,7 @@ async fn test_on_input_format_from_guest_to_host( cx_b.focus(&editor_b); editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input(":", window, cx); }); @@ -1956,7 +2061,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( let after_client_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1; editor_b.update_in(cx_b, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13].clone()) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)].clone()) }); editor.handle_input(":", window, cx); }); @@ -1980,7 +2085,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( let after_host_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1; editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input("a change to increment both buffers' versions", window, cx); }); @@ -2169,16 +2274,28 @@ async fn test_inlay_hint_refresh_is_forwarded( } else { "initial hint" }; - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, character), - label: lsp::InlayHintLabel::String(label.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: 
None, - }])) + Ok(Some(vec![ + lsp::InlayHint { + position: lsp::Position::new(0, character), + label: lsp::InlayHintLabel::String(label.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(1090, 1090), + label: lsp::InlayHintLabel::String("out-of-bounds hint".to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + ])) } }) .next() @@ -2408,7 +2525,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13].clone()) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)].clone()) }); editor.handle_input(":", window, cx); }); @@ -2845,7 +2962,7 @@ async fn test_lsp_pull_diagnostics( editor_a_main.update(cx_a, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); assert_eq!( all_diagnostics.len(), @@ -2974,7 +3091,7 @@ async fn test_lsp_pull_diagnostics( editor_a_main.update(cx_a, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); assert_eq!( all_diagnostics.len(), @@ -3021,7 +3138,7 @@ async fn test_lsp_pull_diagnostics( editor_b_main.update(cx_b, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); assert_eq!( all_diagnostics.len(), @@ -3068,17 +3185,16 @@ async fn test_lsp_pull_diagnostics( editor_b_lib.update(cx_b, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); let expected_messages = [ expected_pull_diagnostic_lib_message, - // TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer. - // expected_push_diagnostic_lib_message, + expected_push_diagnostic_lib_message, ]; assert_eq!( all_diagnostics.len(), - 1, - "Expected pull diagnostics, but got: {all_diagnostics:?}" + 2, + "Expected pull and push diagnostics, but got: {all_diagnostics:?}" ); for diagnostic in all_diagnostics { assert!( @@ -3135,17 +3251,18 @@ async fn test_lsp_pull_diagnostics( editor_b_lib.update(cx_b, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); let expected_messages = [ - expected_workspace_pull_diagnostics_lib_message, - // TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer. - // expected_push_diagnostic_lib_message, + // Despite workspace diagnostics provided, + // the currently open file's diagnostics should be preferred, as LSP suggests. 
+ expected_pull_diagnostic_lib_message, + expected_push_diagnostic_lib_message, ]; assert_eq!( all_diagnostics.len(), - 1, - "Expected pull diagnostics, but got: {all_diagnostics:?}" + 2, + "Expected pull and push diagnostics, but got: {all_diagnostics:?}" ); for diagnostic in all_diagnostics { assert!( @@ -3258,8 +3375,9 @@ async fn test_lsp_pull_diagnostics( "Another workspace diagnostics pull should happen after the diagnostics refresh server request" ); { - assert!( - diagnostics_pulls_result_ids.lock().await.len() == diagnostic_pulls_result_ids, + assert_eq!( + diagnostics_pulls_result_ids.lock().await.len(), + diagnostic_pulls_result_ids, "Pulls should not happen hence no extra ids should appear" ); assert!( @@ -3270,14 +3388,14 @@ async fn test_lsp_pull_diagnostics( editor_b_lib.update(cx_b, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); let expected_messages = [ expected_workspace_pull_diagnostics_lib_message, expected_pull_diagnostic_lib_message, expected_push_diagnostic_lib_message, ]; - assert_eq!(all_diagnostics.len(), 1); + assert_eq!(all_diagnostics.len(), 2); for diagnostic in &all_diagnostics { assert!( expected_messages.contains(&diagnostic.diagnostic.message.as_str()), @@ -3288,7 +3406,7 @@ async fn test_lsp_pull_diagnostics( editor_b_main.update(cx_b, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); assert_eq!(all_diagnostics.len(), 2); @@ -3307,7 +3425,7 @@ async fn test_lsp_pull_diagnostics( editor_a_main.update(cx_a, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let all_diagnostics = snapshot - .diagnostics_in_range(0..snapshot.len()) + .diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) .collect::>(); assert_eq!(all_diagnostics.len(), 2); let expected_messages = [ @@ -3396,7 +3514,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .into_iter() .map(|(sha, message)| (sha.parse().unwrap(), message.into())) .collect(), - remote_url: Some("git@github.com:zed-industries/zed.git".to_string()), }; client_a.fs().set_blame_for_repo( Path::new(path!("/my-repo/.git")), @@ -3481,10 +3598,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA for (idx, (buffer, entry)) in entries.iter().flatten().enumerate() { let details = blame.details_for_entry(*buffer, entry).unwrap(); assert_eq!(details.message, format!("message for idx-{}", idx)); - assert_eq!( - details.permalink.unwrap().to_string(), - format!("https://github.com/zed-industries/zed/commit/{}", entry.sha) - ); } }); }); @@ -4156,6 +4269,288 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes }); } +#[gpui::test] +async fn test_copy_file_name_without_extension( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + path!("/root"), + json!({ + "src": { + "main.rs": indoc! 
{" + fn main() { + println!(\"Hello, world!\"); + } + "}, + } + }), + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project(path!("/root"), cx_a).await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let editor_a = workspace_a + .update_in(cx_a, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let editor_b = workspace_b + .update_in(cx_b, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + editor_a.update_in(cx_a, |editor, window, cx| { + editor.copy_file_name_without_extension(&CopyFileNameWithoutExtension, window, cx); + }); + + assert_eq!( + cx_a.read_from_clipboard().and_then(|item| item.text()), + Some("main".to_string()) + ); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.copy_file_name_without_extension(&CopyFileNameWithoutExtension, window, cx); + }); + + assert_eq!( + cx_b.read_from_clipboard().and_then(|item| item.text()), + Some("main".to_string()) + ); +} + +#[gpui::test] +async fn test_copy_file_name(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + path!("/root"), + json!({ + "src": { + "main.rs": indoc! 
{" + fn main() { + println!(\"Hello, world!\"); + } + "}, + } + }), + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project(path!("/root"), cx_a).await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let editor_a = workspace_a + .update_in(cx_a, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let editor_b = workspace_b + .update_in(cx_b, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + editor_a.update_in(cx_a, |editor, window, cx| { + editor.copy_file_name(&CopyFileName, window, cx); + }); + + assert_eq!( + cx_a.read_from_clipboard().and_then(|item| item.text()), + Some("main.rs".to_string()) + ); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.copy_file_name(&CopyFileName, window, cx); + }); + + assert_eq!( + cx_b.read_from_clipboard().and_then(|item| item.text()), + Some("main.rs".to_string()) + ); +} + +#[gpui::test] +async fn test_copy_file_location(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + cx_b.update(editor::init); + + client_a + .fs() + .insert_tree( + path!("/root"), + json!({ + "src": { + "main.rs": indoc! 
{" + fn main() { + println!(\"Hello, world!\"); + } + "}, + } + }), + ) + .await; + + let (project_a, worktree_id) = client_a.build_local_project(path!("/root"), cx_a).await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let editor_a = workspace_a + .update_in(cx_a, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + let editor_b = workspace_b + .update_in(cx_b, |workspace, window, cx| { + workspace.open_path( + (worktree_id, rel_path("src/main.rs")), + None, + true, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + editor_a.update_in(cx_a, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(16)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_a.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2", path!("src/main.rs"))) + ); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(16)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_b.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2", path!("src/main.rs"))) + ); +} + #[track_caller] fn tab_undo_assert( cx_a: &mut EditorTestContext, diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 07cf866a3513d27894307216e904b130eb023e22..ec654e06341b6fdcbe88e4031f425d18dd6461e7 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -6,7 +6,7 @@ use collab_ui::{ channel_view::ChannelView, notifications::project_shared_notification::ProjectSharedNotification, }; -use editor::{Editor, MultiBuffer, PathKey, SelectionEffects}; +use editor::{Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects}; use gpui::{ AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, TestAppContext, VisualContext, VisualTestContext, point, @@ -124,7 +124,7 @@ async fn test_basic_following( editor.select_left(&Default::default(), window, cx); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![3..2] + vec![MultiBufferOffset(3)..MultiBufferOffset(2)] ); }); editor_a2.update_in(cx_a, |editor, window, cx| { @@ -133,7 +133,7 @@ async fn test_basic_following( editor.select_left(&Default::default(), window, cx); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![2..1] + vec![MultiBufferOffset(2)..MultiBufferOffset(1)] ); }); @@ -158,13 +158,13 @@ async fn test_basic_following( editor_b2.update(cx_b, |editor, cx| editor .selections .ranges(&editor.display_snapshot(cx))), - vec![2..1] + vec![MultiBufferOffset(2)..MultiBufferOffset(1)] ); assert_eq!( editor_b1.update(cx_b, |editor, cx| editor .selections .ranges(&editor.display_snapshot(cx))), - vec![3..3] + 
vec![MultiBufferOffset(3)..MultiBufferOffset(3)] ); executor.run_until_parked(); @@ -386,7 +386,10 @@ async fn test_basic_following( // Changes to client A's editor are reflected on client B. editor_a1.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1, 2..2]) + s.select_ranges([ + MultiBufferOffset(1)..MultiBufferOffset(1), + MultiBufferOffset(2)..MultiBufferOffset(2), + ]) }); }); executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); @@ -396,7 +399,10 @@ async fn test_basic_following( editor_b1.update(cx_b, |editor, cx| { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - &[1..1, 2..2] + &[ + MultiBufferOffset(1)..MultiBufferOffset(1), + MultiBufferOffset(2)..MultiBufferOffset(2) + ] ); }); @@ -408,7 +414,7 @@ async fn test_basic_following( editor_a1.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([3..3]) + s.select_ranges([MultiBufferOffset(3)..MultiBufferOffset(3)]) }); editor.set_scroll_position(point(0., 100.), window, cx); }); @@ -417,7 +423,7 @@ async fn test_basic_following( editor_b1.update(cx_b, |editor, cx| { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - &[3..3] + &[MultiBufferOffset(3)..MultiBufferOffset(3)] ); }); @@ -523,7 +529,7 @@ async fn test_basic_following( }); // Client B activates a panel, and the previously-opened screen-sharing item gets activated. - let panel = cx_b.new(|cx| TestPanel::new(DockPosition::Left, cx)); + let panel = cx_b.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); workspace_b.update_in(cx_b, |workspace, window, cx| { workspace.add_panel(panel, window, cx); workspace.toggle_panel_focus::(window, cx); @@ -1694,7 +1700,7 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T // b should follow a to position 1 editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1]) + s.select_ranges([MultiBufferOffset(1)..MultiBufferOffset(1)]) }) }); cx_a.executor() @@ -1703,7 +1709,7 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T editor_b.update(cx_b, |editor, cx| { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![1..1] + vec![MultiBufferOffset(1)..MultiBufferOffset(1)] ) }); @@ -1719,7 +1725,7 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T // b should not follow a to position 2 editor_a.update_in(cx_a, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([2..2]) + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(2)]) }) }); cx_a.executor() @@ -1728,7 +1734,7 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T editor_b.update(cx_b, |editor, cx| { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![1..1] + vec![MultiBufferOffset(1)..MultiBufferOffset(1)] ) }); cx_b.update(|_, cx| { @@ -1829,7 +1835,7 @@ async fn test_following_into_excluded_file( editor.select_left(&Default::default(), window, cx); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![3..2] + vec![MultiBufferOffset(3)..MultiBufferOffset(2)] ); }); editor_for_excluded_a.update_in(cx_a, |editor, window, cx| { @@ -1838,7 +1844,7 @@ async fn test_following_into_excluded_file( editor.select_left(&Default::default(), 
window, cx); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![18..17] + vec![MultiBufferOffset(18)..MultiBufferOffset(17)] ); }); @@ -1864,7 +1870,7 @@ async fn test_following_into_excluded_file( editor_for_excluded_b.update(cx_b, |editor, cx| editor .selections .ranges(&editor.display_snapshot(cx))), - vec![18..17] + vec![MultiBufferOffset(18)..MultiBufferOffset(17)] ); editor_for_excluded_a.update_in(cx_a, |editor, window, cx| { @@ -2040,7 +2046,7 @@ async fn test_following_to_channel_notes_without_a_shared_project( notes.editor.update(cx, |editor, cx| { editor.insert("Hello from A.", window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.select_ranges(vec![3..4]); + selections.select_ranges(vec![MultiBufferOffset(3)..MultiBufferOffset(4)]); }); }); }); @@ -2076,8 +2082,8 @@ async fn test_following_to_channel_notes_without_a_shared_project( assert_eq!( editor .selections - .ranges::(&editor.display_snapshot(cx)), - &[3..4] + .ranges::(&editor.display_snapshot(cx)), + &[MultiBufferOffset(3)..MultiBufferOffset(4)] ); }) }); diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index a4c8dc0e5b7e5eb01f099c11f29a5d651da09303..391e7355ea196dfe25d363472918837ea817f450 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2,7 +2,7 @@ use crate::{ rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, tests::{ RoomParticipants, TestClient, TestServer, channel_id, following_tests::join_channel, - room_participants, rust_lang, + room_participants, }, }; use anyhow::{Result, anyhow}; @@ -26,7 +26,7 @@ use language::{ Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, language_settings::{Formatter, FormatterList}, - tree_sitter_rust, tree_sitter_typescript, + rust_lang, tree_sitter_rust, tree_sitter_typescript, }; use lsp::{LanguageServerId, OneOf}; use parking_lot::Mutex; @@ -6551,12 +6551,12 @@ async fn test_pane_split_left(cx: &mut TestAppContext) { assert!(workspace.items(cx).collect::>().len() == 2); }); cx.simulate_keystrokes("cmd-k"); - // sleep for longer than the timeout in keyboard shortcut handling - // to verify that it doesn't fire in this case. + // Sleep past the historical timeout to ensure the multi-stroke binding + // still fires now that unambiguous prefixes no longer auto-expire. 
cx.executor().advance_clock(Duration::from_secs(2)); cx.simulate_keystrokes("left"); workspace.update(cx, |workspace, cx| { - assert!(workspace.items(cx).collect::>().len() == 2); + assert!(workspace.items(cx).collect::>().len() == 3); }); } diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index e5cc506bbca8b0a4a2fca972df61d373a288702c..5342b0bbd4b11afb24ccbaa6d4bf17df036cec76 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -4,33 +4,39 @@ use collections::{HashMap, HashSet}; use dap::{Capabilities, adapters::DebugTaskDefinition, transport::RequestHandling}; use debugger_ui::debugger_panel::DebugPanel; +use editor::{Editor, EditorMode, MultiBuffer}; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs as _, RemoveOptions}; use futures::StreamExt as _; -use gpui::{ - AppContext as _, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _, - VisualContext, -}; +use gpui::{AppContext as _, BackgroundExecutor, TestAppContext, UpdateGlobal as _, VisualContext}; use http_client::BlockedHttpClient; use language::{ FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, language_settings::{Formatter, FormatterList, language_settings}, - tree_sitter_typescript, + rust_lang, tree_sitter_typescript, }; use node_runtime::NodeRuntime; use project::{ ProjectPath, debugger::session::ThreadId, lsp_store::{FormatTrigger, LspFormatTarget}, + trusted_worktrees::{PathTrust, TrustedWorktrees}, }; use remote::RemoteClient; use remote_server::{HeadlessAppState, HeadlessProject}; use rpc::proto; use serde_json::json; -use settings::{LanguageServerFormatterSpecifier, PrettierSettingsContent, SettingsStore}; +use settings::{ + InlayHintSettingsContent, LanguageServerFormatterSpecifier, PrettierSettingsContent, + SettingsStore, +}; use std::{ path::Path, - sync::{Arc, atomic::AtomicUsize}, + sync::{ + Arc, + atomic::{AtomicUsize, Ordering}, + }, + time::Duration, }; use task::TcpArgumentsTemplate; use util::{path, rel_path::rel_path}; @@ -43,10 +49,10 @@ async fn test_sharing_an_ssh_remote_project( ) { let executor = cx_a.executor(); cx_a.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); server_cx.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); let mut server = TestServer::start(executor.clone()).await; let client_a = server.create_client(cx_a, "user_a").await; @@ -93,13 +99,14 @@ async fn test_sharing_an_ssh_remote_project( languages, extension_host_proxy: Arc::new(ExtensionHostProxy::new()), }, + false, cx, ) }); let client_ssh = RemoteClient::fake_client(opts, cx_a).await; let (project_a, worktree_id) = client_a - .build_ssh_project(path!("/code/project1"), client_ssh, cx_a) + .build_ssh_project(path!("/code/project1"), client_ssh, false, cx_a) .await; // While the SSH worktree is being scanned, user A shares the remote project. 
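A note on the SemanticVersion-to-semver migration seen in the hunk above (and earlier in this patch in rpc.rs and connection_pool.rs): it relies on `semver::Version` providing the same total ordering plus a public `minor` field. The following is a minimal standalone sketch of that version gating, assuming only the `semver` crate (1.x); the cut-off versions are copied from the comments in connection_pool.rs, and these free functions are illustrative, not the actual `ZedVersion` methods.

```rust
// Sketch of the version gating the patch migrates to semver::Version.
use semver::Version;

fn can_collaborate(version: &Version) -> bool {
    // Clients older than v0.204.1 (the first version after the auto-update bug) are rejected.
    if *version < Version::new(0, 204, 1) {
        return false;
    }
    // The hotfixed Preview range [v0.199.0, v0.199.2) is also rejected.
    if *version >= Version::new(0, 199, 0) && *version < Version::new(0, 199, 2) {
        return false;
    }
    true
}

fn should_auto_subscribe_to_channels(version: &Version) -> bool {
    // semver::Version exposes `minor` as a plain field rather than a getter.
    version.minor < 139
}

fn main() {
    assert!(!can_collaborate(&Version::new(0, 199, 1)));
    assert!(can_collaborate(&Version::new(0, 204, 1)));
    assert!(should_auto_subscribe_to_channels(&Version::new(0, 138, 0)));
}
```

Unlike the old `SemanticVersion`, `semver::Version` is not `Copy` (it carries pre-release and build metadata), which is why the patch drops `Copy` from `ZedVersion` and switches call sites to `zed_version.clone()` and `&ZedVersion`.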
@@ -211,10 +218,10 @@ async fn test_ssh_collaboration_git_branches( server_cx.set_name("server"); cx_a.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); server_cx.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); let mut server = TestServer::start(executor.clone()).await; @@ -253,13 +260,14 @@ async fn test_ssh_collaboration_git_branches( languages, extension_host_proxy: Arc::new(ExtensionHostProxy::new()), }, + false, cx, ) }); let client_ssh = RemoteClient::fake_client(opts, cx_a).await; let (project_a, _) = client_a - .build_ssh_project("/project", client_ssh, cx_a) + .build_ssh_project("/project", client_ssh, false, cx_a) .await; // While the SSH worktree is being scanned, user A shares the remote project. @@ -396,10 +404,10 @@ async fn test_ssh_collaboration_formatting_with_prettier( server_cx.set_name("server"); cx_a.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); server_cx.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); let mut server = TestServer::start(executor.clone()).await; @@ -457,13 +465,14 @@ async fn test_ssh_collaboration_formatting_with_prettier( languages, extension_host_proxy: Arc::new(ExtensionHostProxy::new()), }, + false, cx, ) }); let client_ssh = RemoteClient::fake_client(opts, cx_a).await; let (project_a, worktree_id) = client_a - .build_ssh_project(path!("/project"), client_ssh, cx_a) + .build_ssh_project(path!("/project"), client_ssh, false, cx_a) .await; // While the SSH worktree is being scanned, user A shares the remote project. 
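The extra boolean now threaded through `HeadlessProject::new` and `build_ssh_project` in the hunks above appears to gate the worktree-trust flow exercised by `test_ssh_remote_worktree_trust`, added at the end of this file. The following toy model (my own sketch, not Zed's actual `TrustedWorktrees`/`PathTrust` API) captures the semantics that test asserts: worktrees start restricted, trusting one leaves the others restricted, and the "has restricted worktrees" state clears only once all of them are trusted.

```rust
// Toy model of the trust semantics asserted by the new SSH worktree-trust test.
use std::collections::HashSet;

#[derive(Default)]
struct TrustRegistry {
    // Trusted worktree ids; Zed's API uses WorktreeId and PathTrust instead of u64.
    trusted: HashSet<u64>,
}

impl TrustRegistry {
    fn can_trust(&self, worktree_id: u64) -> bool {
        self.trusted.contains(&worktree_id)
    }

    fn trust(&mut self, worktree_ids: impl IntoIterator<Item = u64>) {
        self.trusted.extend(worktree_ids);
    }

    fn has_restricted_worktrees(&self, all: &[u64]) -> bool {
        all.iter().any(|id| !self.trusted.contains(id))
    }
}

fn main() {
    let worktrees: [u64; 2] = [1, 2];
    let mut registry = TrustRegistry::default();

    // Both worktrees start out restricted.
    assert!(!registry.can_trust(1) && !registry.can_trust(2));

    // Trusting the first worktree does not affect the second.
    registry.trust([1]);
    assert!(registry.can_trust(1) && !registry.can_trust(2));
    assert!(registry.has_restricted_worktrees(&worktrees));

    // Only after trusting both does the restricted state clear.
    registry.trust([2]);
    assert!(!registry.has_restricted_worktrees(&worktrees));
}
```

In the patch itself these semantics are expressed through `TrustedWorktrees::try_get_global`, `can_trust`, `trust(HashSet::from_iter([PathTrust::Worktree(..)]), ..)`, and `has_restricted_worktrees`, as shown in the new test below.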
@@ -583,13 +592,13 @@ async fn test_remote_server_debugger( executor: BackgroundExecutor, ) { cx_a.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); command_palette_hooks::init(cx); zlog::init_test(); dap_adapters::init(cx); }); server_cx.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); dap_adapters::init(cx); }); let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx); @@ -618,6 +627,7 @@ async fn test_remote_server_debugger( languages, extension_host_proxy: Arc::new(ExtensionHostProxy::new()), }, + false, cx, ) }); @@ -630,7 +640,7 @@ async fn test_remote_server_debugger( command_palette_hooks::init(cx); }); let (project_a, _) = client_a - .build_ssh_project(path!("/code"), client_ssh.clone(), cx_a) + .build_ssh_project(path!("/code"), client_ssh.clone(), false, cx_a) .await; let (workspace, cx_a) = client_a.build_workspace(&project_a, cx_a); @@ -691,13 +701,13 @@ async fn test_slow_adapter_startup_retries( executor: BackgroundExecutor, ) { cx_a.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); command_palette_hooks::init(cx); zlog::init_test(); dap_adapters::init(cx); }); server_cx.update(|cx| { - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); dap_adapters::init(cx); }); let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx); @@ -726,6 +736,7 @@ async fn test_slow_adapter_startup_retries( languages, extension_host_proxy: Arc::new(ExtensionHostProxy::new()), }, + false, cx, ) }); @@ -738,7 +749,7 @@ async fn test_slow_adapter_startup_retries( command_palette_hooks::init(cx); }); let (project_a, _) = client_a - .build_ssh_project(path!("/code"), client_ssh.clone(), cx_a) + .build_ssh_project(path!("/code"), client_ssh.clone(), false, cx_a) .await; let (workspace, cx_a) = client_a.build_workspace(&project_a, cx_a); @@ -841,3 +852,261 @@ async fn test_slow_adapter_startup_retries( shutdown_session.await.unwrap(); } + +#[gpui::test] +async fn test_ssh_remote_worktree_trust(cx_a: &mut TestAppContext, server_cx: &mut TestAppContext) { + use project::trusted_worktrees::RemoteHostLocation; + + cx_a.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + project::trusted_worktrees::init(HashMap::default(), None, None, cx); + }); + server_cx.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + project::trusted_worktrees::init(HashMap::default(), None, None, cx); + }); + + let mut server = TestServer::start(cx_a.executor().clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + + let server_name = "override-rust-analyzer"; + let lsp_inlay_hint_request_count = Arc::new(AtomicUsize::new(0)); + + let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree( + path!("/projects"), + json!({ + "project_a": { + ".zed": { + "settings.json": r#"{"languages":{"Rust":{"language_servers":["override-rust-analyzer"]}}}"# + }, + "main.rs": "fn main() {}" + }, + "project_b": { "lib.rs": "pub fn lib() {}" } + }), + ) + .await; + + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = 
Arc::new(LanguageRegistry::new(server_cx.executor())); + languages.add(rust_lang()); + + let capabilities = lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }; + let mut fake_language_servers = languages.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: server_name, + capabilities: capabilities.clone(), + initializer: Some(Box::new({ + let lsp_inlay_hint_request_count = lsp_inlay_hint_request_count.clone(); + move |fake_server| { + let lsp_inlay_hint_request_count = lsp_inlay_hint_request_count.clone(); + fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>( + move |_params, _| { + lsp_inlay_hint_request_count.fetch_add(1, Ordering::Release); + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 0), + label: lsp::InlayHintLabel::String("hint".to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + let _headless_project = server_cx.new(|cx| { + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + extension_host_proxy: Arc::new(ExtensionHostProxy::new()), + }, + true, + cx, + ) + }); + + let client_ssh = RemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id_a) = client_a + .build_ssh_project(path!("/projects/project_a"), client_ssh.clone(), true, cx_a) + .await; + + cx_a.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + let language_settings = &mut settings.project.all_languages.defaults; + language_settings.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + }); + }); + + project_a + .update(cx_a, |project, cx| { + project.languages().add(rust_lang()); + project.languages().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: server_name, + capabilities, + ..FakeLspAdapter::default() + }, + ); + project.find_or_create_worktree(path!("/projects/project_b"), true, cx) + }) + .await + .unwrap(); + + cx_a.run_until_parked(); + + let worktree_ids = project_a.read_with(cx_a, |project, cx| { + project + .worktrees(cx) + .map(|wt| wt.read(cx).id()) + .collect::<Vec<_>>() + }); + assert_eq!(worktree_ids.len(), 2); + + let remote_host = project_a.read_with(cx_a, |project, cx| { + project + .remote_connection_options(cx) + .map(RemoteHostLocation::from) + }); + + let trusted_worktrees = + cx_a.update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should exist")); + + let can_trust_a = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[0], cx)); + let can_trust_b = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[1], cx)); + assert!(!can_trust_a, "project_a should be restricted initially"); + assert!(!can_trust_b, "project_b should be restricted initially"); + + let worktree_store = project_a.read_with(cx_a, |project, _| project.worktree_store()); + let has_restricted = trusted_worktrees.read_with(cx_a, |store, cx| { + store.has_restricted_worktrees(&worktree_store, cx) + }); + assert!(has_restricted, "should have restricted worktrees"); + + let buffer_before_approval = project_a + .update(cx_a, |project, cx| { + project.open_buffer((worktree_id_a, rel_path("main.rs")), cx) + }) + .await +
.unwrap(); + + let (editor, cx_a) = cx_a.add_window_view(|window, cx| { + Editor::new( + EditorMode::full(), + cx.new(|cx| MultiBuffer::singleton(buffer_before_approval.clone(), cx)), + Some(project_a.clone()), + window, + cx, + ) + }); + cx_a.run_until_parked(); + let fake_language_server = fake_language_servers.next(); + + cx_a.read(|cx| { + let file = buffer_before_approval.read(cx).file(); + assert_eq!( + language_settings(Some("Rust".into()), file, cx).language_servers, + ["...".to_string()], + "remote .zed/settings.json must not sync before trust approval" + ) + }); + + editor.update_in(cx_a, |editor, window, cx| { + editor.handle_input("1", window, cx); + }); + cx_a.run_until_parked(); + cx_a.executor().advance_clock(Duration::from_secs(1)); + assert_eq!( + lsp_inlay_hint_request_count.load(Ordering::Acquire), + 0, + "inlay hints must not be queried before trust approval" + ); + + trusted_worktrees.update(cx_a, |store, cx| { + store.trust( + HashSet::from_iter([PathTrust::Worktree(worktree_ids[0])]), + remote_host.clone(), + cx, + ); + }); + cx_a.run_until_parked(); + + cx_a.read(|cx| { + let file = buffer_before_approval.read(cx).file(); + assert_eq!( + language_settings(Some("Rust".into()), file, cx).language_servers, + ["override-rust-analyzer".to_string()], + "remote .zed/settings.json should sync after trust approval" + ) + }); + let _fake_language_server = fake_language_server.await.unwrap(); + editor.update_in(cx_a, |editor, window, cx| { + editor.handle_input("1", window, cx); + }); + cx_a.run_until_parked(); + cx_a.executor().advance_clock(Duration::from_secs(1)); + assert!( + lsp_inlay_hint_request_count.load(Ordering::Acquire) > 0, + "inlay hints should be queried after trust approval" + ); + + let can_trust_a = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[0], cx)); + let can_trust_b = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[1], cx)); + assert!(can_trust_a, "project_a should be trusted after trust()"); + assert!(!can_trust_b, "project_b should still be restricted"); + + trusted_worktrees.update(cx_a, |store, cx| { + store.trust( + HashSet::from_iter([PathTrust::Worktree(worktree_ids[1])]), + remote_host.clone(), + cx, + ); + }); + + let can_trust_a = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[0], cx)); + let can_trust_b = + trusted_worktrees.update(cx_a, |store, cx| store.can_trust(worktree_ids[1], cx)); + assert!(can_trust_a, "project_a should remain trusted"); + assert!(can_trust_b, "project_b should now be trusted"); + + let has_restricted_after = trusted_worktrees.read_with(cx_a, |store, cx| { + store.has_restricted_worktrees(&worktree_store, cx) + }); + assert!( + !has_restricted_after, + "should have no restricted worktrees after trusting both" + ); +} diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 0afaccf80cae609ae3264b9b5e374964b01b6f38..3abbd1a014b556db02e70b42c239729100f17eb8 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -31,7 +31,6 @@ use rpc::{ RECEIVE_TIMEOUT, proto::{self, ChannelRole}, }; -use semantic_version::SemanticVersion; use serde_json::json; use session::{AppSession, Session}; use settings::SettingsStore; @@ -173,7 +172,7 @@ impl TestServer { let settings = SettingsStore::test(cx); cx.set_global(settings); theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); + 
release_channel::init(semver::Version::new(0, 0, 0), cx); }); let clock = Arc::new(FakeSystemClock::new()); @@ -295,7 +294,7 @@ impl TestServer { server_conn, client_name, Principal::User(user), - ZedVersion(SemanticVersion::new(1, 0, 0)), + ZedVersion(semver::Version::new(1, 0, 0)), Some("test".to_string()), None, None, @@ -762,6 +761,7 @@ impl TestClient { &self, root_path: impl AsRef, ssh: Entity, + init_worktree_trust: bool, cx: &mut TestAppContext, ) -> (Entity, WorktreeId) { let project = cx.update(|cx| { @@ -772,6 +772,7 @@ impl TestClient { self.app_state.user_store.clone(), self.app_state.languages.clone(), self.app_state.fs.clone(), + init_worktree_trust, cx, ) }); @@ -840,6 +841,7 @@ impl TestClient { self.app_state.languages.clone(), self.app_state.fs.clone(), None, + false, cx, ) }) diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index 5db588fdb3aad3f523864b5f90600e49eca9d8b6..8959c6ccbe88d1f3f78fb29009904244624d9999 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -11,7 +11,7 @@ use editor::{ display_map::ToDisplayPoint, scroll::Autoscroll, }; use gpui::{ - AnyView, App, ClipboardItem, Context, Entity, EventEmitter, Focusable, Pixels, Point, Render, + App, ClipboardItem, Context, Entity, EventEmitter, Focusable, Pixels, Point, Render, Subscription, Task, VisualContext as _, WeakEntity, Window, actions, }; use project::Project; @@ -25,7 +25,7 @@ use util::ResultExt; use workspace::{CollaboratorId, item::TabContentParams}; use workspace::{ ItemNavHistory, Pane, SaveIntent, Toast, ViewId, Workspace, WorkspaceId, - item::{FollowableItem, Item, ItemEvent, ItemHandle}, + item::{FollowableItem, Item, ItemEvent}, searchable::SearchableItemHandle, }; use workspace::{item::Dedup, notifications::NotificationId}; @@ -441,11 +441,11 @@ impl Item for ChannelView { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } @@ -541,7 +541,7 @@ impl Item for ChannelView { }) } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 97559f41a76bed36f22d3ef22bc95d913a462116..0ae4ff270bd672ca028d638484b9a23f5981de1a 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -32,12 +32,12 @@ use std::{mem, sync::Arc}; use theme::{ActiveTheme, ThemeSettings}; use ui::{ Avatar, AvatarAvailabilityIndicator, Button, Color, ContextMenu, Facepile, HighlightedLabel, - Icon, IconButton, IconName, IconSize, Indicator, Label, ListHeader, ListItem, Tooltip, + Icon, IconButton, IconName, IconSize, Indicator, Label, ListHeader, ListItem, Tab, Tooltip, prelude::*, tooltip_container, }; use util::{ResultExt, TryFutureExt, maybe}; use workspace::{ - Deafen, LeaveCall, Mute, OpenChannelNotes, ScreenShare, ShareProject, Workspace, + CopyRoomId, Deafen, LeaveCall, Mute, OpenChannelNotes, ScreenShare, ShareProject, Workspace, dock::{DockPosition, Panel, PanelEvent}, notifications::{DetachAndPromptErr, NotifyResultExt}, }; @@ -109,25 +109,37 @@ pub fn init(cx: &mut App) { }); // TODO: make it possible to bind this one to a held key for push to talk? 
// how to make "toggle_on_modifiers_press" contextual? - workspace.register_action(|_, _: &Mute, window, cx| { - let room = ActiveCall::global(cx).read(cx).room().cloned(); - if let Some(room) = room { - window.defer(cx, move |_window, cx| { - room.update(cx, |room, cx| room.toggle_mute(cx)) - }); - } - }); - workspace.register_action(|_, _: &Deafen, window, cx| { - let room = ActiveCall::global(cx).read(cx).room().cloned(); - if let Some(room) = room { - window.defer(cx, move |_window, cx| { - room.update(cx, |room, cx| room.toggle_deafen(cx)) - }); - } - }); + workspace.register_action(|_, _: &Mute, _, cx| title_bar::collab::toggle_mute(cx)); + workspace.register_action(|_, _: &Deafen, _, cx| title_bar::collab::toggle_deafen(cx)); workspace.register_action(|_, _: &LeaveCall, window, cx| { CollabPanel::leave_call(window, cx); }); + workspace.register_action(|workspace, _: &CopyRoomId, window, cx| { + use workspace::notifications::{NotificationId, NotifyTaskExt as _}; + + struct RoomIdCopiedToast; + + if let Some(room) = ActiveCall::global(cx).read(cx).room() { + let room_id_fut = room.read(cx).room_id(); + cx.spawn(async move |workspace, cx| { + let room_id = room_id_fut.await.context("Failed to get livekit room")?; + workspace.update(cx, |workspace, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(room_id)); + workspace.show_toast( + workspace::Toast::new( + NotificationId::unique::<RoomIdCopiedToast>(), + "Room ID copied to clipboard", + ) + .autohide(), + cx, + ); + }) + }) + .detach_and_notify_err(window, cx); + } else { + workspace.show_error(&"There’s no active call; join one first.", cx); + } + }); workspace.register_action(|workspace, _: &ShareProject, window, cx| { let project = workspace.project().clone(); println!("{project:?}"); @@ -287,7 +299,7 @@ impl CollabPanel { cx.new(|cx| { let filter_editor = cx.new(|cx| { let mut editor = Editor::single_line(window, cx); - editor.set_placeholder_text("Filter...", window, cx); + editor.set_placeholder_text("Search channels…", window, cx); editor }); @@ -672,20 +684,25 @@ impl CollabPanel { { self.entries.push(ListEntry::ChannelEditor { depth: 0 }); } + + let should_respect_collapse = query.is_empty(); let mut collapse_depth = None; + for (idx, channel) in channels.into_iter().enumerate() { let depth = channel.parent_path.len(); - if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) { - collapse_depth = Some(depth); - } else if let Some(collapsed_depth) = collapse_depth { - if depth > collapsed_depth { - continue; - } - if self.is_channel_collapsed(channel.id) { + if should_respect_collapse { + if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) { collapse_depth = Some(depth); - } else { - collapse_depth = None; + } else if let Some(collapsed_depth) = collapse_depth { + if depth > collapsed_depth { + continue; + } + if self.is_channel_collapsed(channel.id) { + collapse_depth = Some(depth); + } else { + collapse_depth = None; + } } } @@ -1235,7 +1252,7 @@ impl CollabPanel { context_menu }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let subscription = cx.subscribe_in( &context_menu, window, @@ -1407,7 +1424,7 @@ impl CollabPanel { context_menu }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let subscription = cx.subscribe_in( &context_menu, window, @@ -1470,7 +1487,7 @@ impl CollabPanel { }) }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let
subscription = cx.subscribe_in( &context_menu, window, @@ -1491,7 +1508,7 @@ impl CollabPanel { fn reset_filter_editor_text(&mut self, window: &mut Window, cx: &mut Context) -> bool { self.filter_editor.update(cx, |editor, cx| { - if editor.buffer().read(cx).len(cx) > 0 { + if editor.buffer().read(cx).len(cx).0 > 0 { editor.set_text("", window, cx); true } else { @@ -1504,9 +1521,9 @@ impl CollabPanel { if cx.stop_active_drag(window) { return; } else if self.take_editing_state(window, cx) { - window.focus(&self.filter_editor.focus_handle(cx)); + window.focus(&self.filter_editor.focus_handle(cx), cx); } else if !self.reset_filter_editor_text(window, cx) { - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } if self.context_menu.is_some() { @@ -1809,7 +1826,7 @@ impl CollabPanel { }); self.update_entries(false, cx); self.select_channel_editor(); - window.focus(&self.channel_name_editor.focus_handle(cx)); + window.focus(&self.channel_name_editor.focus_handle(cx), cx); cx.notify(); } @@ -1834,7 +1851,7 @@ impl CollabPanel { }); self.update_entries(false, cx); self.select_channel_editor(); - window.focus(&self.channel_name_editor.focus_handle(cx)); + window.focus(&self.channel_name_editor.focus_handle(cx), cx); cx.notify(); } @@ -1883,7 +1900,7 @@ impl CollabPanel { editor.set_text(channel.name.clone(), window, cx); editor.select_all(&Default::default(), window, cx); }); - window.focus(&self.channel_name_editor.focus_handle(cx)); + window.focus(&self.channel_name_editor.focus_handle(cx), cx); self.update_entries(false, cx); self.select_channel_editor(); } @@ -2407,6 +2424,21 @@ impl CollabPanel { }); v_flex() .size_full() + .gap_1() + .child( + h_flex() + .p_2() + .h(Tab::container_height(cx)) + .gap_1p5() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(self.render_filter_input(&self.filter_editor, cx)), + ) .child( list( self.list_state.clone(), @@ -2414,15 +2446,6 @@ impl CollabPanel { ) .size_full(), ) - .child( - v_flex() - .child(div().mx_2().border_primary(cx).border_t_1()) - .child( - v_flex() - .p_2() - .child(self.render_filter_input(&self.filter_editor, cx)), - ), - ) } fn render_filter_input( diff --git a/crates/collab_ui/src/collab_panel/channel_modal.rs b/crates/collab_ui/src/collab_panel/channel_modal.rs index e558835dbaf0e34e2efa1b4f64fd8f6cb96016c5..ae5b537f2c66dc273d504a70f2b75cb8bec0be20 100644 --- a/crates/collab_ui/src/collab_panel/channel_modal.rs +++ b/crates/collab_ui/src/collab_panel/channel_modal.rs @@ -10,7 +10,7 @@ use gpui::{ }; use picker::{Picker, PickerDelegate}; use std::sync::Arc; -use ui::{Avatar, CheckboxWithLabel, ContextMenu, ListItem, ListItemSpacing, prelude::*}; +use ui::{Avatar, Checkbox, ContextMenu, ListItem, ListItemSpacing, prelude::*}; use util::TryFutureExt; use workspace::{ModalView, notifications::DetachAndPromptErr}; @@ -165,16 +165,18 @@ impl Render for ChannelModal { .h(rems_from_px(22.)) .justify_between() .line_height(rems(1.25)) - .child(CheckboxWithLabel::new( - "is-public", - Label::new("Public").size(LabelSize::Small), - if visibility == ChannelVisibility::Public { - ui::ToggleState::Selected - } else { - ui::ToggleState::Unselected - }, - cx.listener(Self::set_channel_visibility), - )) + .child( + Checkbox::new( + "is-public", + if visibility == ChannelVisibility::Public { + ui::ToggleState::Selected + } else { + ui::ToggleState::Unselected + }, + ) + .label("Public") + 
.on_click(cx.listener(Self::set_channel_visibility)), + ) .children( Some( Button::new("copy-link", "Copy Link") @@ -640,7 +642,7 @@ impl ChannelModalDelegate { }); menu }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let subscription = cx.subscribe_in( &context_menu, window, diff --git a/crates/command_palette/Cargo.toml b/crates/command_palette/Cargo.toml index f21c202721fa29644e17df499fcfb288a72dc492..bd86c10a8071896f0b24ea531d354c0e46114d48 100644 --- a/crates/command_palette/Cargo.toml +++ b/crates/command_palette/Cargo.toml @@ -8,6 +8,9 @@ license = "GPL-3.0-or-later" [lints] workspace = true +[features] +test-support = ["db/test-support"] + [lib] path = "src/command_palette.rs" doctest = false diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index d971bca1f01e878d7517c1d13f525dfbf8e47afa..038b58ac5f4e90544232ccc8da55d0ca71ec28df 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -2,7 +2,7 @@ mod persistence; use std::{ cmp::{self, Reverse}, - collections::HashMap, + collections::{HashMap, VecDeque}, sync::Arc, time::Duration, }; @@ -19,6 +19,7 @@ use gpui::{ ParentElement, Render, Styled, Task, WeakEntity, Window, actions, }; use persistence::COMMAND_PALETTE_HISTORY; +use picker::Direction; use picker::{Picker, PickerDelegate}; use postage::{sink::Sink, stream::Stream}; use settings::Settings; @@ -163,6 +164,7 @@ pub struct CommandPaletteDelegate { Task<()>, postage::dispatch::Receiver<(Vec, Vec, CommandInterceptResult)>, )>, + query_history: QueryHistory, } struct Command { @@ -170,6 +172,91 @@ struct Command { action: Box<dyn Action>, } +#[derive(Default)] +struct QueryHistory { + history: Option<VecDeque<String>>, + cursor: Option<usize>, + prefix: Option<String>, +} + +impl QueryHistory { + fn history(&mut self) -> &mut VecDeque<String> { + self.history.get_or_insert_with(|| { + COMMAND_PALETTE_HISTORY + .list_recent_queries() + .unwrap_or_default() + .into_iter() + .collect() + }) + } + + fn add(&mut self, query: String) { + if let Some(pos) = self.history().iter().position(|h| h == &query) { + self.history().remove(pos); + } + self.history().push_back(query); + self.cursor = None; + self.prefix = None; + } + + fn validate_cursor(&mut self, current_query: &str) -> Option<usize> { + if let Some(pos) = self.cursor { + if self.history().get(pos).map(|s| s.as_str()) != Some(current_query) { + self.cursor = None; + self.prefix = None; + } + } + self.cursor + } + + fn previous(&mut self, current_query: &str) -> Option<&str> { + if self.validate_cursor(current_query).is_none() { + self.prefix = Some(current_query.to_string()); + } + + let prefix = self.prefix.clone().unwrap_or_default(); + let start_index = self.cursor.unwrap_or(self.history().len()); + + for i in (0..start_index).rev() { + if self + .history() + .get(i) + .is_some_and(|e| e.starts_with(&prefix)) + { + self.cursor = Some(i); + return self.history().get(i).map(|s| s.as_str()); + } + } + None + } + + fn next(&mut self, current_query: &str) -> Option<&str> { + let selected = self.validate_cursor(current_query)?; + let prefix = self.prefix.clone().unwrap_or_default(); + + for i in (selected + 1)..self.history().len() { + if self + .history() + .get(i) + .is_some_and(|e| e.starts_with(&prefix)) + { + self.cursor = Some(i); + return self.history().get(i).map(|s| s.as_str()); + } + } + None + } + + fn reset_cursor(&mut self) { + self.cursor = None; + self.prefix = None; + } + + fn is_navigating(&self) -> bool {
self.cursor.is_some() + } +} + impl Clone for Command { fn clone(&self) -> Self { Self { @@ -196,6 +283,7 @@ impl CommandPaletteDelegate { previous_focus_handle, latest_query: String::new(), updating_matches: None, + query_history: Default::default(), } } @@ -271,6 +359,11 @@ impl CommandPaletteDelegate { // so we need to return an Option here self.commands.get(action_ix) } + + #[cfg(any(test, feature = "test-support"))] + pub fn seed_history(&mut self, queries: &[&str]) { + self.query_history.history = Some(queries.iter().map(|s| s.to_string()).collect()); + } } impl PickerDelegate for CommandPaletteDelegate { @@ -280,6 +373,38 @@ impl PickerDelegate for CommandPaletteDelegate { "Execute a command...".into() } + fn select_history( + &mut self, + direction: Direction, + query: &str, + _window: &mut Window, + _cx: &mut App, + ) -> Option { + match direction { + Direction::Up => { + let should_use_history = + self.selected_ix == 0 || self.query_history.is_navigating(); + if should_use_history { + if let Some(query) = self.query_history.previous(query).map(|s| s.to_string()) { + return Some(query); + } + } + } + Direction::Down => { + if self.query_history.is_navigating() { + if let Some(query) = self.query_history.next(query).map(|s| s.to_string()) { + return Some(query); + } else { + let prefix = self.query_history.prefix.take().unwrap_or_default(); + self.query_history.reset_cursor(); + return Some(prefix); + } + } + } + } + None + } + fn match_count(&self) -> usize { self.matches.len() } @@ -439,6 +564,12 @@ impl PickerDelegate for CommandPaletteDelegate { self.dismissed(window, cx); return; } + + if !self.latest_query.is_empty() { + self.query_history.add(self.latest_query.clone()); + self.query_history.reset_cursor(); + } + let action_ix = self.matches[self.selected_ix].candidate_id; let command = self.commands.swap_remove(action_ix); telemetry::event!( @@ -457,7 +588,7 @@ impl PickerDelegate for CommandPaletteDelegate { }) .detach_and_log_err(cx); let action = command.action; - window.focus(&self.previous_focus_handle); + window.focus(&self.previous_focus_handle, cx); self.dismissed(window, cx); window.dispatch_action(action, cx); } @@ -588,7 +719,7 @@ mod tests { use super::*; use editor::Editor; use go_to_line::GoToLine; - use gpui::TestAppContext; + use gpui::{TestAppContext, VisualTestContext}; use language::Point; use project::Project; use settings::KeymapFile; @@ -653,7 +784,7 @@ mod tests { workspace.update_in(cx, |workspace, window, cx| { workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); - editor.update(cx, |editor, cx| window.focus(&editor.focus_handle(cx))) + editor.update(cx, |editor, cx| window.focus(&editor.focus_handle(cx), cx)) }); cx.simulate_keystrokes("cmd-shift-p"); @@ -724,7 +855,7 @@ mod tests { workspace.update_in(cx, |workspace, window, cx| { workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx); - editor.update(cx, |editor, cx| window.focus(&editor.focus_handle(cx))) + editor.update(cx, |editor, cx| window.focus(&editor.focus_handle(cx), cx)) }); // Test normalize (trimming whitespace and double colons) @@ -799,7 +930,9 @@ mod tests { "bindings": { "cmd-n": "workspace::NewFile", "enter": "menu::Confirm", - "cmd-shift-p": "command_palette::Toggle" + "cmd-shift-p": "command_palette::Toggle", + "up": "menu::SelectPrevious", + "down": "menu::SelectNext" } } ]"#, @@ -808,4 +941,264 @@ mod tests { app_state }) } + + fn open_palette_with_history( + workspace: &Entity, + history: &[&str], + cx: &mut 
VisualTestContext, + ) -> Entity> { + cx.simulate_keystrokes("cmd-shift-p"); + cx.run_until_parked(); + + let palette = workspace.update(cx, |workspace, cx| { + workspace + .active_modal::(cx) + .unwrap() + .read(cx) + .picker + .clone() + }); + + palette.update(cx, |palette, _cx| { + palette.delegate.seed_history(history); + }); + + palette + } + + #[gpui::test] + async fn test_history_navigation_basic(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = open_palette_with_history(&workspace, &["backspace", "select all"], cx); + + // Query should be empty initially + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), ""); + }); + + // Press up - should load most recent query "select all" + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select all"); + }); + + // Press up again - should load "backspace" + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "backspace"); + }); + + // Press down - should go back to "select all" + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select all"); + }); + + // Press down again - should clear query (exit history mode) + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), ""); + }); + } + + #[gpui::test] + async fn test_history_mode_exit_on_typing(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = open_palette_with_history(&workspace, &["backspace"], cx); + + // Press up to enter history mode + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "backspace"); + }); + + // Type something - should append to the history query + cx.simulate_input("x"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "backspacex"); + }); + } + + #[gpui::test] + async fn test_history_navigation_with_suggestions(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = open_palette_with_history(&workspace, &["editor: close", "editor: open"], cx); + + // Open palette with a query that has multiple matches + cx.simulate_input("editor"); + cx.background_executor.run_until_parked(); + + // Should have multiple matches, selected_ix should be 0 + palette.read_with(cx, |palette, _| { + assert!(palette.delegate.matches.len() > 1); + assert_eq!(palette.delegate.selected_ix, 0); + }); + + // Press down - should navigate to next suggestion (not history) + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, _| { + assert_eq!(palette.delegate.selected_ix, 1); + }); + + // 
Press up - should go back to first suggestion + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, _| { + assert_eq!(palette.delegate.selected_ix, 0); + }); + + // Press up again at top - should enter history mode and show previous query + // that matches the "editor" prefix + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "editor: open"); + }); + } + + #[gpui::test] + async fn test_history_prefix_search(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = open_palette_with_history( + &workspace, + &["open file", "select all", "select line", "backspace"], + cx, + ); + + // Type "sel" as a prefix + cx.simulate_input("sel"); + cx.background_executor.run_until_parked(); + + // Press up - should get "select line" (most recent matching "sel") + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select line"); + }); + + // Press up again - should get "select all" (next matching "sel") + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select all"); + }); + + // Press up again - should stay at "select all" (no more matches for "sel") + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select all"); + }); + + // Press down - should go back to "select line" + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "select line"); + }); + + // Press down again - should return to original prefix "sel" + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "sel"); + }); + } + + #[gpui::test] + async fn test_history_prefix_search_no_matches(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = + open_palette_with_history(&workspace, &["open file", "backspace", "select all"], cx); + + // Type "xyz" as a prefix that doesn't match anything + cx.simulate_input("xyz"); + cx.background_executor.run_until_parked(); + + // Press up - should stay at "xyz" (no matches) + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "xyz"); + }); + } + + #[gpui::test] + async fn test_history_empty_prefix_searches_all(cx: &mut TestAppContext) { + let app_state = init_test(cx); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let palette = open_palette_with_history(&workspace, &["alpha", "beta", "gamma"], cx); + + // With empty query, press up - should get "gamma" (most recent) + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, 
|palette, cx| { + assert_eq!(palette.query(cx), "gamma"); + }); + + // Press up - should get "beta" + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "beta"); + }); + + // Press up - should get "alpha" + cx.simulate_keystrokes("up"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "alpha"); + }); + + // Press down - should get "beta" + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "beta"); + }); + + // Press down - should get "gamma" + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), "gamma"); + }); + + // Press down - should return to empty string (exit history mode) + cx.simulate_keystrokes("down"); + cx.background_executor.run_until_parked(); + palette.read_with(cx, |palette, cx| { + assert_eq!(palette.query(cx), ""); + }); + } } diff --git a/crates/command_palette/src/persistence.rs b/crates/command_palette/src/persistence.rs index feaed72570d56f4895ff05eef891fc81c2e5e0b6..4556079b4f9c8e7a989f3e32eac6f7d084e67a4e 100644 --- a/crates/command_palette/src/persistence.rs +++ b/crates/command_palette/src/persistence.rs @@ -123,6 +123,16 @@ impl CommandPaletteDB { ORDER BY COUNT(1) DESC } } + + query! { + pub fn list_recent_queries() -> Result> { + SELECT user_query + FROM command_invocations + WHERE user_query != "" + GROUP BY user_query + ORDER BY MAX(last_invoked) ASC + } + } } #[cfg(test)] diff --git a/crates/context_server/Cargo.toml b/crates/context_server/Cargo.toml index 846a53fde4b6f87493ec2b75da6c08d2b081df47..539b873c3527b5a01f1dfcf7b768f0758dc869b5 100644 --- a/crates/context_server/Cargo.toml +++ b/crates/context_server/Cargo.toml @@ -12,7 +12,7 @@ workspace = true path = "src/context_server.rs" [features] -test-support = [] +test-support = ["gpui/test-support"] [dependencies] anyhow.workspace = true @@ -20,6 +20,7 @@ async-trait.workspace = true collections.workspace = true futures.workspace = true gpui.workspace = true +http_client = { workspace = true, features = ["test-support"] } log.workspace = true net.workspace = true parking_lot.workspace = true @@ -28,7 +29,12 @@ schemars.workspace = true serde_json.workspace = true serde.workspace = true settings.workspace = true +slotmap.workspace = true smol.workspace = true tempfile.workspace = true url = { workspace = true, features = ["serde"] } util.workspace = true +terminal.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/context_server/src/client.rs b/crates/context_server/src/client.rs index f891e96250f3334540aa859fe438c87297fc0100..605f24178916faa5173c32c28be6c80ee625cb6c 100644 --- a/crates/context_server/src/client.rs +++ b/crates/context_server/src/client.rs @@ -6,6 +6,7 @@ use parking_lot::Mutex; use postage::barrier; use serde::{Deserialize, Serialize, de::DeserializeOwned}; use serde_json::{Value, value::RawValue}; +use slotmap::SlotMap; use smol::channel; use std::{ fmt, @@ -50,7 +51,7 @@ pub(crate) struct Client { next_id: AtomicI32, outbound_tx: channel::Sender, name: Arc, - notification_handlers: Arc>>, + subscription_set: Arc>, response_handlers: Arc>>>, #[allow(clippy::type_complexity)] #[allow(dead_code)] @@ -191,21 +192,20 @@ impl Client { let (outbound_tx, outbound_rx) = 
channel::unbounded::(); let (output_done_tx, output_done_rx) = barrier::channel(); - let notification_handlers = - Arc::new(Mutex::new(HashMap::<_, NotificationHandler>::default())); + let subscription_set = Arc::new(Mutex::new(NotificationSubscriptionSet::default())); let response_handlers = Arc::new(Mutex::new(Some(HashMap::<_, ResponseHandler>::default()))); let request_handlers = Arc::new(Mutex::new(HashMap::<_, RequestHandler>::default())); let receive_input_task = cx.spawn({ - let notification_handlers = notification_handlers.clone(); + let subscription_set = subscription_set.clone(); let response_handlers = response_handlers.clone(); let request_handlers = request_handlers.clone(); let transport = transport.clone(); async move |cx| { Self::handle_input( transport, - notification_handlers, + subscription_set, request_handlers, response_handlers, cx, @@ -236,7 +236,7 @@ impl Client { Ok(Self { server_id, - notification_handlers, + subscription_set, response_handlers, name: server_name, next_id: Default::default(), @@ -257,7 +257,7 @@ impl Client { /// to pending requests) and notifications (which trigger registered handlers). async fn handle_input( transport: Arc, - notification_handlers: Arc>>, + subscription_set: Arc>, request_handlers: Arc>>, response_handlers: Arc>>>, cx: &mut AsyncApp, @@ -282,10 +282,11 @@ impl Client { handler(Ok(message.to_string())); } } else if let Ok(notification) = serde_json::from_str::(&message) { - let mut notification_handlers = notification_handlers.lock(); - if let Some(handler) = notification_handlers.get_mut(notification.method.as_str()) { - handler(notification.params.unwrap_or(Value::Null), cx.clone()); - } + subscription_set.lock().notify( + ¬ification.method, + notification.params.unwrap_or(Value::Null), + cx, + ) } else { log::error!("Unhandled JSON from context_server: {}", message); } @@ -451,12 +452,18 @@ impl Client { Ok(()) } + #[must_use] pub fn on_notification( &self, method: &'static str, f: Box, - ) { - self.notification_handlers.lock().insert(method, f); + ) -> NotificationSubscription { + let mut notification_subscriptions = self.subscription_set.lock(); + + NotificationSubscription { + id: notification_subscriptions.add_handler(method, f), + set: self.subscription_set.clone(), + } } } @@ -485,3 +492,73 @@ impl fmt::Debug for Client { .finish_non_exhaustive() } } + +slotmap::new_key_type! { + struct NotificationSubscriptionId; +} + +#[derive(Default)] +pub struct NotificationSubscriptionSet { + // we have very few subscriptions at the moment + methods: Vec<(&'static str, Vec)>, + handlers: SlotMap, +} + +impl NotificationSubscriptionSet { + #[must_use] + fn add_handler( + &mut self, + method: &'static str, + handler: NotificationHandler, + ) -> NotificationSubscriptionId { + let id = self.handlers.insert(handler); + if let Some((_, handler_ids)) = self + .methods + .iter_mut() + .find(|(probe_method, _)| method == *probe_method) + { + debug_assert!( + handler_ids.len() < 20, + "Too many MCP handlers for {}. 
Consider using a different data structure.", + method + ); + + handler_ids.push(id); + } else { + self.methods.push((method, vec![id])); + }; + id + } + + fn notify(&mut self, method: &str, payload: Value, cx: &mut AsyncApp) { + let Some((_, handler_ids)) = self + .methods + .iter_mut() + .find(|(probe_method, _)| method == *probe_method) + else { + return; + }; + + for handler_id in handler_ids { + if let Some(handler) = self.handlers.get_mut(*handler_id) { + handler(payload.clone(), cx.clone()); + } + } + } +} + +pub struct NotificationSubscription { + id: NotificationSubscriptionId, + set: Arc>, +} + +impl Drop for NotificationSubscription { + fn drop(&mut self) { + let mut set = self.set.lock(); + set.handlers.remove(self.id); + set.methods.retain_mut(|(_, handler_ids)| { + handler_ids.retain(|id| *id != self.id); + !handler_ids.is_empty() + }); + } +} diff --git a/crates/context_server/src/context_server.rs b/crates/context_server/src/context_server.rs index 52ed524220947430df3e63fced367ca4eb223fff..92804549c69b01dd3729efb3a0b47905cd73d813 100644 --- a/crates/context_server/src/context_server.rs +++ b/crates/context_server/src/context_server.rs @@ -6,6 +6,8 @@ pub mod test; pub mod transport; pub mod types; +use collections::HashMap; +use http_client::HttpClient; use std::path::Path; use std::sync::Arc; use std::{fmt::Display, path::PathBuf}; @@ -15,6 +17,9 @@ use client::Client; use gpui::AsyncApp; use parking_lot::RwLock; pub use settings::ContextServerCommand; +use url::Url; + +use crate::transport::HttpTransport; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ContextServerId(pub Arc); @@ -52,6 +57,25 @@ impl ContextServer { } } + pub fn http( + id: ContextServerId, + endpoint: &Url, + headers: HashMap, + http_client: Arc, + executor: gpui::BackgroundExecutor, + ) -> Result { + let transport = match endpoint.scheme() { + "http" | "https" => { + log::info!("Using HTTP transport for {}", endpoint); + let transport = + HttpTransport::new(http_client, endpoint.to_string(), headers, executor); + Arc::new(transport) as _ + } + _ => anyhow::bail!("unsupported MCP url scheme {}", endpoint.scheme()), + }; + Ok(Self::new(id, transport)) + } + pub fn new(id: ContextServerId, transport: Arc) -> Self { Self { id, @@ -72,22 +96,6 @@ impl ContextServer { self.initialize(self.new_client(cx)?).await } - /// Starts the context server, making sure handlers are registered before initialization happens - pub async fn start_with_handlers( - &self, - notification_handlers: Vec<( - &'static str, - Box, - )>, - cx: &AsyncApp, - ) -> Result<()> { - let client = self.new_client(cx)?; - for (method, handler) in notification_handlers { - client.on_notification(method, handler); - } - self.initialize(client).await - } - fn new_client(&self, cx: &AsyncApp) -> Result { Ok(match &self.configuration { ContextServerTransport::Stdio(command, working_directory) => Client::stdio( diff --git a/crates/context_server/src/protocol.rs b/crates/context_server/src/protocol.rs index 5355f20f620b5bed76bf945e863fdb5cbcc2ff43..a218a8a3e0e6352997e4152214077cb3851317b3 100644 --- a/crates/context_server/src/protocol.rs +++ b/crates/context_server/src/protocol.rs @@ -12,7 +12,7 @@ use futures::channel::oneshot; use gpui::AsyncApp; use serde_json::Value; -use crate::client::Client; +use crate::client::{Client, NotificationSubscription}; use crate::types::{self, Notification, Request}; pub struct ModelContextProtocol { @@ -119,7 +119,7 @@ impl InitializedContextServerProtocol { &self, method: &'static str, f: Box, - ) { - 
self.inner.on_notification(method, f); + ) -> NotificationSubscription { + self.inner.on_notification(method, f) } } diff --git a/crates/context_server/src/transport.rs b/crates/context_server/src/transport.rs index b4f56b0ef03ac6adf4ee81f878818ec3fecc5ef9..a3d6f998d49872c44513da00c506b68534c36b65 100644 --- a/crates/context_server/src/transport.rs +++ b/crates/context_server/src/transport.rs @@ -1,11 +1,12 @@ +pub mod http; mod stdio_transport; -use std::pin::Pin; - use anyhow::Result; use async_trait::async_trait; use futures::Stream; +use std::pin::Pin; +pub use http::*; pub use stdio_transport::*; #[async_trait] diff --git a/crates/context_server/src/transport/http.rs b/crates/context_server/src/transport/http.rs new file mode 100644 index 0000000000000000000000000000000000000000..70248f0278fcf80024d75d7f78cae5c29f26cc43 --- /dev/null +++ b/crates/context_server/src/transport/http.rs @@ -0,0 +1,259 @@ +use anyhow::{Result, anyhow}; +use async_trait::async_trait; +use collections::HashMap; +use futures::{Stream, StreamExt}; +use gpui::BackgroundExecutor; +use http_client::{AsyncBody, HttpClient, Request, Response, http::Method}; +use parking_lot::Mutex as SyncMutex; +use smol::channel; +use std::{pin::Pin, sync::Arc}; + +use crate::transport::Transport; + +// Constants from MCP spec +const HEADER_SESSION_ID: &str = "Mcp-Session-Id"; +const EVENT_STREAM_MIME_TYPE: &str = "text/event-stream"; +const JSON_MIME_TYPE: &str = "application/json"; + +/// HTTP Transport with session management and SSE support +pub struct HttpTransport { + http_client: Arc, + endpoint: String, + session_id: Arc>>, + executor: BackgroundExecutor, + response_tx: channel::Sender, + response_rx: channel::Receiver, + error_tx: channel::Sender, + error_rx: channel::Receiver, + // Authentication headers to include in requests + headers: HashMap, +} + +impl HttpTransport { + pub fn new( + http_client: Arc, + endpoint: String, + headers: HashMap, + executor: BackgroundExecutor, + ) -> Self { + let (response_tx, response_rx) = channel::unbounded(); + let (error_tx, error_rx) = channel::unbounded(); + + Self { + http_client, + executor, + endpoint, + session_id: Arc::new(SyncMutex::new(None)), + response_tx, + response_rx, + error_tx, + error_rx, + headers, + } + } + + /// Send a message and handle the response based on content type + async fn send_message(&self, message: String) -> Result<()> { + let is_notification = + !message.contains("\"id\":") || message.contains("notifications/initialized"); + + let mut request_builder = Request::builder() + .method(Method::POST) + .uri(&self.endpoint) + .header("Content-Type", JSON_MIME_TYPE) + .header( + "Accept", + format!("{}, {}", JSON_MIME_TYPE, EVENT_STREAM_MIME_TYPE), + ); + + for (key, value) in &self.headers { + request_builder = request_builder.header(key.as_str(), value.as_str()); + } + + // Add session ID if we have one (except for initialize) + if let Some(ref session_id) = *self.session_id.lock() { + request_builder = request_builder.header(HEADER_SESSION_ID, session_id.as_str()); + } + + let request = request_builder.body(AsyncBody::from(message.into_bytes()))?; + let mut response = self.http_client.send(request).await?; + + // Handle different response types based on status and content-type + match response.status() { + status if status.is_success() => { + // Check content type + let content_type = response + .headers() + .get("content-type") + .and_then(|v| v.to_str().ok()); + + // Extract session ID from response headers if present + if let Some(session_id) = 
response + .headers() + .get(HEADER_SESSION_ID) + .and_then(|v| v.to_str().ok()) + { + *self.session_id.lock() = Some(session_id.to_string()); + log::debug!("Session ID set: {}", session_id); + } + + match content_type { + Some(ct) if ct.starts_with(JSON_MIME_TYPE) => { + // JSON response - read and forward immediately + let mut body = String::new(); + futures::AsyncReadExt::read_to_string(response.body_mut(), &mut body) + .await?; + + // Only send non-empty responses + if !body.is_empty() { + self.response_tx + .send(body) + .await + .map_err(|_| anyhow!("Failed to send JSON response"))?; + } + } + Some(ct) if ct.starts_with(EVENT_STREAM_MIME_TYPE) => { + // SSE stream - set up streaming + self.setup_sse_stream(response).await?; + } + _ => { + // For notifications, 202 Accepted with no content type is ok + if is_notification && status.as_u16() == 202 { + log::debug!("Notification accepted"); + } else { + return Err(anyhow!("Unexpected content type: {:?}", content_type)); + } + } + } + } + status if status.as_u16() == 202 => { + // Accepted - notification acknowledged, no response needed + log::debug!("Notification accepted"); + } + _ => { + let mut error_body = String::new(); + futures::AsyncReadExt::read_to_string(response.body_mut(), &mut error_body).await?; + + self.error_tx + .send(format!("HTTP {}: {}", response.status(), error_body)) + .await + .map_err(|_| anyhow!("Failed to send error"))?; + } + } + + Ok(()) + } + + /// Set up SSE streaming from the response + async fn setup_sse_stream(&self, mut response: Response) -> Result<()> { + let response_tx = self.response_tx.clone(); + let error_tx = self.error_tx.clone(); + + // Spawn a task to handle the SSE stream + smol::spawn(async move { + let reader = futures::io::BufReader::new(response.body_mut()); + let mut lines = futures::AsyncBufReadExt::lines(reader); + + let mut data_buffer = Vec::new(); + let mut in_message = false; + + while let Some(line_result) = lines.next().await { + match line_result { + Ok(line) => { + if line.is_empty() { + // Empty line signals end of event + if !data_buffer.is_empty() { + let message = data_buffer.join("\n"); + + // Filter out ping messages and empty data + if !message.trim().is_empty() && message != "ping" { + if let Err(e) = response_tx.send(message).await { + log::error!("Failed to send SSE message: {}", e); + break; + } + } + data_buffer.clear(); + } + in_message = false; + } else if let Some(data) = line.strip_prefix("data: ") { + // Handle data lines + let data = data.trim(); + if !data.is_empty() { + // Check if this is a ping message + if data == "ping" { + log::trace!("Received SSE ping"); + continue; + } + data_buffer.push(data.to_string()); + in_message = true; + } + } else if line.starts_with("event:") + || line.starts_with("id:") + || line.starts_with("retry:") + { + // Ignore other SSE fields + continue; + } else if in_message { + // Continuation of data + data_buffer.push(line); + } + } + Err(e) => { + let _ = error_tx.send(format!("SSE stream error: {}", e)).await; + break; + } + } + } + }) + .detach(); + + Ok(()) + } +} + +#[async_trait] +impl Transport for HttpTransport { + async fn send(&self, message: String) -> Result<()> { + self.send_message(message).await + } + + fn receive(&self) -> Pin + Send>> { + Box::pin(self.response_rx.clone()) + } + + fn receive_err(&self) -> Pin + Send>> { + Box::pin(self.error_rx.clone()) + } +} + +impl Drop for HttpTransport { + fn drop(&mut self) { + // Try to cleanup session on drop + let http_client = self.http_client.clone(); + let endpoint 
= self.endpoint.clone(); + let session_id = self.session_id.lock().clone(); + let headers = self.headers.clone(); + + if let Some(session_id) = session_id { + self.executor + .spawn(async move { + let mut request_builder = Request::builder() + .method(Method::DELETE) + .uri(&endpoint) + .header(HEADER_SESSION_ID, &session_id); + + // Add authentication headers if present + for (key, value) in headers { + request_builder = request_builder.header(key.as_str(), value.as_str()); + } + + let request = request_builder.body(AsyncBody::empty()); + + if let Ok(request) = request { + let _ = http_client.send(request).await; + } + }) + .detach(); + } + } +} diff --git a/crates/context_server/src/transport/stdio_transport.rs b/crates/context_server/src/transport/stdio_transport.rs index 83908b46829c4cfe3b536ecca1155c909ee424dd..e675770e9ee50df9993076e6d71c70befa118c4b 100644 --- a/crates/context_server/src/transport/stdio_transport.rs +++ b/crates/context_server/src/transport/stdio_transport.rs @@ -8,9 +8,12 @@ use futures::{ AsyncBufReadExt as _, AsyncRead, AsyncWrite, AsyncWriteExt as _, Stream, StreamExt as _, }; use gpui::AsyncApp; +use settings::Settings as _; use smol::channel; use smol::process::Child; +use terminal::terminal_settings::TerminalSettings; use util::TryFutureExt as _; +use util::shell_builder::ShellBuilder; use crate::client::ModelContextServerBinary; use crate::transport::Transport; @@ -28,9 +31,12 @@ impl StdioTransport { working_directory: &Option, cx: &AsyncApp, ) -> Result { - let mut command = util::command::new_smol_command(&binary.executable); + let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?; + let builder = ShellBuilder::new(&shell, cfg!(windows)).non_interactive(); + let mut command = + builder.build_command(Some(binary.executable.display().to_string()), &binary.args); + command - .args(&binary.args) .envs(binary.env.unwrap_or_default()) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) diff --git a/crates/context_server/src/types.rs b/crates/context_server/src/types.rs index 03aca4f3caf7995091bbc8e049494b324674a9d3..81a427a289347ad50bf6a11674c4c5867073a274 100644 --- a/crates/context_server/src/types.rs +++ b/crates/context_server/src/types.rs @@ -330,7 +330,7 @@ pub struct PromptMessage { pub content: MessageContent, } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum Role { User, diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index 0d3b19c0c7bd264f8ed10e53289376055f833307..3a1706a7a679fbc14eafbeac953d842cda9f65c8 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -33,7 +33,7 @@ fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true -edit_prediction.workspace = true +edit_prediction_types.workspace = true language.workspace = true log.workspace = true lsp.workspace = true @@ -52,6 +52,7 @@ ui.workspace = true util.workspace = true workspace.workspace = true itertools.workspace = true +url.workspace = true [target.'cfg(windows)'.dependencies] async-std = { version = "1.12.0", features = ["unstable"] } diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index ed18a199bf2c08c8c046a8ad3e7f945b1340643e..a6963296f5c0ce0395698d2952618123c103ff55 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1,10 +1,11 @@ pub mod copilot_chat; -mod copilot_completion_provider; +mod 
copilot_edit_prediction_delegate; pub mod copilot_responses; pub mod request; mod sign_in; -use crate::sign_in::initiate_sign_in_within_workspace; +use crate::request::NextEditSuggestions; +use crate::sign_in::initiate_sign_out; use ::fs::Fs; use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, HashSet}; @@ -18,7 +19,7 @@ use http_client::HttpClient; use language::language_settings::CopilotSettings; use language::{ Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16, ToPointUtf16, - language_settings::{EditPredictionProvider, all_language_settings, language_settings}, + language_settings::{EditPredictionProvider, all_language_settings}, point_from_lsp, point_to_lsp, }; use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId, LanguageServerName}; @@ -28,12 +29,10 @@ use project::DisableAiSettings; use request::StatusNotification; use semver::Version; use serde_json::json; -use settings::Settings; -use settings::SettingsStore; -use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace}; -use std::collections::hash_map::Entry; +use settings::{Settings, SettingsStore}; use std::{ any::TypeId, + collections::hash_map::Entry, env, ffi::OsString, mem, @@ -42,12 +41,14 @@ use std::{ sync::Arc, }; use sum_tree::Dimensions; -use util::rel_path::RelPath; use util::{ResultExt, fs::remove_matching}; use workspace::Workspace; -pub use crate::copilot_completion_provider::CopilotCompletionProvider; -pub use crate::sign_in::{CopilotCodeVerification, initiate_sign_in, reinstall_and_sign_in}; +pub use crate::copilot_edit_prediction_delegate::CopilotEditPredictionDelegate; +pub use crate::sign_in::{ + ConfigurationMode, ConfigurationView, CopilotCodeVerification, initiate_sign_in, + reinstall_and_sign_in, +}; actions!( copilot, @@ -98,21 +99,14 @@ pub fn init( .detach(); cx.observe_new(|workspace: &mut Workspace, _window, _cx| { - workspace.register_action(|workspace, _: &SignIn, window, cx| { - if let Some(copilot) = Copilot::global(cx) { - let is_reinstall = false; - initiate_sign_in_within_workspace(workspace, copilot, is_reinstall, window, cx); - } + workspace.register_action(|_, _: &SignIn, window, cx| { + initiate_sign_in(window, cx); }); - workspace.register_action(|workspace, _: &Reinstall, window, cx| { - if let Some(copilot) = Copilot::global(cx) { - reinstall_and_sign_in_within_workspace(workspace, copilot, window, cx); - } + workspace.register_action(|_, _: &Reinstall, window, cx| { + reinstall_and_sign_in(window, cx); }); - workspace.register_action(|workspace, _: &SignOut, _window, cx| { - if let Some(copilot) = Copilot::global(cx) { - sign_out_within_workspace(workspace, copilot, cx); - } + workspace.register_action(|_, _: &SignOut, window, cx| { + initiate_sign_out(window, cx); }); }) .detach(); @@ -322,6 +316,15 @@ struct GlobalCopilot(Entity); impl Global for GlobalCopilot {} +/// Copilot's NextEditSuggestion response, with coordinates converted to Anchors. 
+struct CopilotEditPrediction { + buffer: Entity, + range: Range, + text: String, + command: Option, + snapshot: BufferSnapshot, +} + impl Copilot { pub fn global(cx: &App) -> Option> { cx.try_global::() @@ -375,7 +378,7 @@ impl Copilot { } } - fn start_copilot( + pub fn start_copilot( &mut self, check_edit_prediction_provider: bool, awaiting_sign_in_after_start: bool, @@ -563,6 +566,14 @@ impl Copilot { let server = start_language_server.await; this.update(cx, |this, cx| { cx.notify(); + + if env::var("ZED_FORCE_COPILOT_ERROR").is_ok() { + this.server = CopilotServer::Error( + "Forced error for testing (ZED_FORCE_COPILOT_ERROR)".into(), + ); + return; + } + match server { Ok((server, status)) => { this.server = CopilotServer::Running(RunningCopilotServer { @@ -584,7 +595,17 @@ impl Copilot { .ok(); } - pub(crate) fn sign_in(&mut self, cx: &mut Context) -> Task> { + pub fn is_authenticated(&self) -> bool { + return matches!( + self.server, + CopilotServer::Running(RunningCopilotServer { + sign_in_status: SignInStatus::Authorized, + .. + }) + ); + } + + pub fn sign_in(&mut self, cx: &mut Context) -> Task> { if let CopilotServer::Running(server) = &mut self.server { let task = match &server.sign_in_status { SignInStatus::Authorized => Task::ready(Ok(())).shared(), @@ -807,7 +828,7 @@ impl Copilot { .ok(); } language::BufferEvent::FileHandleChanged - | language::BufferEvent::LanguageChanged => { + | language::BufferEvent::LanguageChanged(_) => { let new_language_id = id_for_language(buffer.read(cx).language()); let Ok(new_uri) = uri_for_buffer(&buffer, cx) else { return Ok(()); @@ -862,101 +883,19 @@ impl Copilot { } } - pub fn completions( - &mut self, - buffer: &Entity, - position: T, - cx: &mut Context, - ) -> Task>> - where - T: ToPointUtf16, - { - self.request_completions::(buffer, position, cx) - } - - pub fn completions_cycling( + pub(crate) fn completions( &mut self, buffer: &Entity, - position: T, - cx: &mut Context, - ) -> Task>> - where - T: ToPointUtf16, - { - self.request_completions::(buffer, position, cx) - } - - pub fn accept_completion( - &mut self, - completion: &Completion, + position: Anchor, cx: &mut Context, - ) -> Task> { - let server = match self.server.as_authenticated() { - Ok(server) => server, - Err(error) => return Task::ready(Err(error)), - }; - let request = - server - .lsp - .request::(request::NotifyAcceptedParams { - uuid: completion.uuid.clone(), - }); - cx.background_spawn(async move { - request - .await - .into_response() - .context("copilot: notify accepted")?; - Ok(()) - }) - } - - pub fn discard_completions( - &mut self, - completions: &[Completion], - cx: &mut Context, - ) -> Task> { - let server = match self.server.as_authenticated() { - Ok(server) => server, - Err(_) => return Task::ready(Ok(())), - }; - let request = - server - .lsp - .request::(request::NotifyRejectedParams { - uuids: completions - .iter() - .map(|completion| completion.uuid.clone()) - .collect(), - }); - cx.background_spawn(async move { - request - .await - .into_response() - .context("copilot: notify rejected")?; - Ok(()) - }) - } - - fn request_completions( - &mut self, - buffer: &Entity, - position: T, - cx: &mut Context, - ) -> Task>> - where - R: 'static - + lsp::request::Request< - Params = request::GetCompletionsParams, - Result = request::GetCompletionsResult, - >, - T: ToPointUtf16, - { + ) -> Task>> { self.register_buffer(buffer, cx); let server = match self.server.as_authenticated() { Ok(server) => server, Err(error) => return Task::ready(Err(error)), }; + let 
buffer_entity = buffer.clone(); let lsp = server.lsp.clone(); let registered_buffer = server .registered_buffers @@ -966,46 +905,31 @@ impl Copilot { let buffer = buffer.read(cx); let uri = registered_buffer.uri.clone(); let position = position.to_point_utf16(buffer); - let settings = language_settings( - buffer.language_at(position).map(|l| l.name()), - buffer.file(), - cx, - ); - let tab_size = settings.tab_size; - let hard_tabs = settings.hard_tabs; - let relative_path = buffer - .file() - .map_or(RelPath::empty().into(), |file| file.path().clone()); cx.background_spawn(async move { let (version, snapshot) = snapshot.await?; let result = lsp - .request::(request::GetCompletionsParams { - doc: request::GetCompletionsDocument { - uri, - tab_size: tab_size.into(), - indent_size: 1, - insert_spaces: !hard_tabs, - relative_path: relative_path.to_proto(), - position: point_to_lsp(position), - version: version.try_into().unwrap(), - }, + .request::(request::NextEditSuggestionsParams { + text_document: lsp::VersionedTextDocumentIdentifier { uri, version }, + position: point_to_lsp(position), }) .await .into_response() .context("copilot: get completions")?; let completions = result - .completions + .edits .into_iter() .map(|completion| { let start = snapshot .clip_point_utf16(point_from_lsp(completion.range.start), Bias::Left); let end = snapshot.clip_point_utf16(point_from_lsp(completion.range.end), Bias::Left); - Completion { - uuid: completion.uuid, + CopilotEditPrediction { + buffer: buffer_entity.clone(), range: snapshot.anchor_before(start)..snapshot.anchor_after(end), text: completion.text, + command: completion.command, + snapshot: snapshot.clone(), } }) .collect(); @@ -1013,6 +937,35 @@ impl Copilot { }) } + pub(crate) fn accept_completion( + &mut self, + completion: &CopilotEditPrediction, + cx: &mut Context, + ) -> Task> { + let server = match self.server.as_authenticated() { + Ok(server) => server, + Err(error) => return Task::ready(Err(error)), + }; + if let Some(command) = &completion.command { + let request = server + .lsp + .request::(lsp::ExecuteCommandParams { + command: command.command.clone(), + arguments: command.arguments.clone().unwrap_or_default(), + ..Default::default() + }); + cx.background_spawn(async move { + request + .await + .into_response() + .context("copilot: notify accepted")?; + Ok(()) + }) + } else { + Task::ready(Ok(())) + } + } + pub fn status(&self) -> Status { match &self.server { CopilotServer::Starting { task } => Status::Starting { task: task.clone() }, @@ -1235,7 +1188,10 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: .await; if should_install { node_runtime - .npm_install_packages(paths::copilot_dir(), &[(PACKAGE_NAME, &latest_version)]) + .npm_install_packages( + paths::copilot_dir(), + &[(PACKAGE_NAME, &latest_version.to_string())], + ) .await?; } @@ -1246,7 +1202,11 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: mod tests { use super::*; use gpui::TestAppContext; - use util::{path, paths::PathStyle, rel_path::rel_path}; + use util::{ + path, + paths::PathStyle, + rel_path::{RelPath, rel_path}, + }; #[gpui::test(iterations = 10)] async fn test_buffer_management(cx: &mut TestAppContext) { diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 5d22760942dbbcfd72f1dacb83c249a08f2fe72a..52a3631791ecaf4e1f7b2bc935be37816f2b25de 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -294,6 +294,10 @@ pub enum ChatMessage { 
content: ChatMessageContent, #[serde(default, skip_serializing_if = "Vec::is_empty")] tool_calls: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + reasoning_opaque: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + reasoning_text: Option, }, User { content: ChatMessageContent, @@ -353,6 +357,8 @@ pub enum ToolCallContent { pub struct FunctionContent { pub name: String, pub arguments: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub thought_signature: Option, } #[derive(Deserialize, Debug)] @@ -384,6 +390,8 @@ pub struct ResponseDelta { pub role: Option, #[serde(default)] pub tool_calls: Vec, + pub reasoning_opaque: Option, + pub reasoning_text: Option, } #[derive(Deserialize, Debug, Eq, PartialEq)] pub struct ToolCallChunk { @@ -396,6 +404,7 @@ pub struct ToolCallChunk { pub struct FunctionChunk { pub name: Option, pub arguments: Option, + pub thought_signature: Option, } #[derive(Deserialize)] @@ -783,13 +792,13 @@ async fn stream_completion( is_user_initiated: bool, ) -> Result>> { let is_vision_request = request.messages.iter().any(|message| match message { - ChatMessage::User { content } - | ChatMessage::Assistant { content, .. } - | ChatMessage::Tool { content, .. } => { - matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. }))) - } - _ => false, - }); + ChatMessage::User { content } + | ChatMessage::Assistant { content, .. } + | ChatMessage::Tool { content, .. } => { + matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. }))) + } + _ => false, + }); let request_initiator = if is_user_initiated { "user" } else { "agent" }; diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs similarity index 73% rename from crates/copilot/src/copilot_completion_provider.rs rename to crates/copilot/src/copilot_edit_prediction_delegate.rs index ba8ae82508896884f1b6e9741e7ccd6fd78dce76..514e135cb4c34f6a1f49687fcd413113f78f9eae 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ -1,53 +1,33 @@ -use crate::{Completion, Copilot}; +use crate::{Copilot, CopilotEditPrediction}; use anyhow::Result; -use edit_prediction::{Direction, EditPrediction, EditPredictionProvider}; -use gpui::{App, Context, Entity, EntityId, Task}; -use language::{Buffer, OffsetRangeExt, ToOffset, language_settings::AllLanguageSettings}; -use settings::Settings; -use std::{path::Path, time::Duration}; +use edit_prediction_types::{EditPrediction, EditPredictionDelegate, interpolate_edits}; +use gpui::{App, Context, Entity, Task}; +use language::{Anchor, Buffer, EditPreview, OffsetRangeExt}; +use std::{ops::Range, sync::Arc, time::Duration}; pub const COPILOT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); -pub struct CopilotCompletionProvider { - cycled: bool, - buffer_id: Option, - completions: Vec, - active_completion_index: usize, - file_extension: Option, +pub struct CopilotEditPredictionDelegate { + completion: Option<(CopilotEditPrediction, EditPreview)>, pending_refresh: Option>>, - pending_cycling_refresh: Option>>, copilot: Entity, } -impl CopilotCompletionProvider { +impl CopilotEditPredictionDelegate { pub fn new(copilot: Entity) -> Self { Self { - cycled: false, - buffer_id: None, - completions: Vec::new(), - active_completion_index: 0, - file_extension: None, + 
completion: None, pending_refresh: None, - pending_cycling_refresh: None, copilot, } } - fn active_completion(&self) -> Option<&Completion> { - self.completions.get(self.active_completion_index) - } - - fn push_completion(&mut self, new_completion: Completion) { - for completion in &self.completions { - if completion.text == new_completion.text && completion.range == new_completion.range { - return; - } - } - self.completions.push(new_completion); + fn active_completion(&self) -> Option<&(CopilotEditPrediction, EditPreview)> { + self.completion.as_ref() } } -impl EditPredictionProvider for CopilotCompletionProvider { +impl EditPredictionDelegate for CopilotEditPredictionDelegate { fn name() -> &'static str { "copilot" } @@ -56,7 +36,7 @@ impl EditPredictionProvider for CopilotCompletionProvider { "Copilot" } - fn show_completions_in_menu() -> bool { + fn show_predictions_in_menu() -> bool { true } @@ -64,12 +44,8 @@ impl EditPredictionProvider for CopilotCompletionProvider { true } - fn supports_jump_to_edit() -> bool { - false - } - - fn is_refreshing(&self) -> bool { - self.pending_refresh.is_some() && self.completions.is_empty() + fn is_refreshing(&self, _cx: &App) -> bool { + self.pending_refresh.is_some() && self.completion.is_none() } fn is_enabled( @@ -102,160 +78,96 @@ impl EditPredictionProvider for CopilotCompletionProvider { })? .await?; - this.update(cx, |this, cx| { - if !completions.is_empty() { - this.cycled = false; + if let Some(mut completion) = completions.into_iter().next() + && let Some(trimmed_completion) = cx + .update(|cx| trim_completion(&completion, cx)) + .ok() + .flatten() + { + let preview = buffer + .update(cx, |this, cx| { + this.preview_edits(Arc::from(std::slice::from_ref(&trimmed_completion)), cx) + })? + .await; + this.update(cx, |this, cx| { this.pending_refresh = None; - this.pending_cycling_refresh = None; - this.completions.clear(); - this.active_completion_index = 0; - this.buffer_id = Some(buffer.entity_id()); - this.file_extension = buffer.read(cx).file().and_then(|file| { - Some( - Path::new(file.file_name(cx)) - .extension()? - .to_str()? - .to_string(), - ) - }); - - for completion in completions { - this.push_completion(completion); - } + completion.range = trimmed_completion.0; + completion.text = trimmed_completion.1.to_string(); + this.completion = Some((completion, preview)); + cx.notify(); - } - })?; + })?; + } Ok(()) })); } - fn cycle( - &mut self, - buffer: Entity, - cursor_position: language::Anchor, - direction: Direction, - cx: &mut Context, - ) { - if self.cycled { - match direction { - Direction::Prev => { - self.active_completion_index = if self.active_completion_index == 0 { - self.completions.len().saturating_sub(1) - } else { - self.active_completion_index - 1 - }; - } - Direction::Next => { - if self.completions.is_empty() { - self.active_completion_index = 0 - } else { - self.active_completion_index = - (self.active_completion_index + 1) % self.completions.len(); - } - } - } - - cx.notify(); - } else { - let copilot = self.copilot.clone(); - self.pending_cycling_refresh = Some(cx.spawn(async move |this, cx| { - let completions = copilot - .update(cx, |copilot, cx| { - copilot.completions_cycling(&buffer, cursor_position, cx) - })? - .await?; - - this.update(cx, |this, cx| { - this.cycled = true; - this.file_extension = buffer.read(cx).file().and_then(|file| { - Some( - Path::new(file.file_name(cx)) - .extension()? - .to_str()? 
- .to_string(), - ) - }); - for completion in completions { - this.push_completion(completion); - } - this.cycle(buffer, cursor_position, direction, cx); - })?; - - Ok(()) - })); - } - } - fn accept(&mut self, cx: &mut Context) { - if let Some(completion) = self.active_completion() { + if let Some((completion, _)) = self.active_completion() { self.copilot .update(cx, |copilot, cx| copilot.accept_completion(completion, cx)) .detach_and_log_err(cx); } } - fn discard(&mut self, cx: &mut Context) { - let settings = AllLanguageSettings::get_global(cx); - - let copilot_enabled = settings.show_edit_predictions(None, cx); - - if !copilot_enabled { - return; - } - - self.copilot - .update(cx, |copilot, cx| { - copilot.discard_completions(&self.completions, cx) - }) - .detach_and_log_err(cx); - } + fn discard(&mut self, _: &mut Context) {} fn suggest( &mut self, buffer: &Entity, - cursor_position: language::Anchor, + _: language::Anchor, cx: &mut Context, ) -> Option { let buffer_id = buffer.entity_id(); let buffer = buffer.read(cx); - let completion = self.active_completion()?; - if Some(buffer_id) != self.buffer_id + let (completion, edit_preview) = self.active_completion()?; + + if Some(buffer_id) != Some(completion.buffer.entity_id()) || !completion.range.start.is_valid(buffer) || !completion.range.end.is_valid(buffer) { return None; } + let edits = vec![( + completion.range.clone(), + Arc::from(completion.text.as_ref()), + )]; + let edits = interpolate_edits(&completion.snapshot, &buffer.snapshot(), &edits) + .filter(|edits| !edits.is_empty())?; + + Some(EditPrediction::Local { + id: None, + edits, + edit_preview: Some(edit_preview.clone()), + }) + } +} - let mut completion_range = completion.range.to_offset(buffer); - let prefix_len = common_prefix( - buffer.chars_for_range(completion_range.clone()), - completion.text.chars(), - ); - completion_range.start += prefix_len; - let suffix_len = common_prefix( - buffer.reversed_chars_for_range(completion_range.clone()), - completion.text[prefix_len..].chars().rev(), - ); - completion_range.end = completion_range.end.saturating_sub(suffix_len); - - if completion_range.is_empty() - && completion_range.start == cursor_position.to_offset(buffer) - { - let completion_text = &completion.text[prefix_len..completion.text.len() - suffix_len]; - if completion_text.trim().is_empty() { - None - } else { - let position = cursor_position.bias_right(buffer); - Some(EditPrediction::Local { - id: None, - edits: vec![(position..position, completion_text.into())], - edit_preview: None, - }) - } - } else { - None - } +fn trim_completion( + completion: &CopilotEditPrediction, + cx: &mut App, +) -> Option<(Range, Arc)> { + let buffer = completion.buffer.read(cx); + let mut completion_range = completion.range.to_offset(buffer); + let prefix_len = common_prefix( + buffer.chars_for_range(completion_range.clone()), + completion.text.chars(), + ); + completion_range.start += prefix_len; + let suffix_len = common_prefix( + buffer.reversed_chars_for_range(completion_range.clone()), + completion.text[prefix_len..].chars().rev(), + ); + completion_range.end = completion_range.end.saturating_sub(suffix_len); + let completion_text = &completion.text[prefix_len..completion.text.len() - suffix_len]; + if completion_text.trim().is_empty() { + None + } else { + let completion_range = + buffer.anchor_after(completion_range.start)..buffer.anchor_after(completion_range.end); + + Some((completion_range, Arc::from(completion_text))) } } @@ -269,8 +181,9 @@ fn common_prefix, T2: Iterator>(a: 
T1, b: #[cfg(test)] mod tests { use super::*; + use edit_prediction_types::EditPredictionGranularity; use editor::{ - Editor, ExcerptRange, MultiBuffer, SelectionEffects, + Editor, ExcerptRange, MultiBuffer, MultiBufferOffset, SelectionEffects, test::editor_lsp_test_context::EditorLspTestContext, }; use fs::FakeFs; @@ -281,6 +194,7 @@ mod tests { Point, language_settings::{CompletionSettingsContent, LspInsertMode, WordsCompletionMode}, }; + use lsp::Uri; use project::Project; use serde_json::json; use settings::{AllLanguageSettingsContent, SettingsStore}; @@ -314,7 +228,7 @@ mod tests { cx, ) .await; - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); cx.update_editor(|editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); @@ -336,12 +250,15 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "one.copilot1".into(), range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { @@ -382,12 +299,15 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "one.copilot1".into(), range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, _, cx| { @@ -411,12 +331,15 @@ mod tests { // After debouncing, new Copilot completions should be requested. handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "one.copilot2".into(), range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 5)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { @@ -478,45 +401,6 @@ mod tests { assert_eq!(editor.display_text(cx), "one.cop\ntwo\nthree\n"); assert_eq!(editor.text(cx), "one.cop\ntwo\nthree\n"); }); - - // Reset the editor to verify how suggestions behave when tabbing on leading indentation. 
- cx.update_editor(|editor, window, cx| { - editor.set_text("fn foo() {\n \n}", window, cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Point::new(1, 2)..Point::new(1, 2)]) - }); - }); - handle_copilot_completion_request( - &copilot_lsp, - vec![crate::request::Completion { - text: " let x = 4;".into(), - range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), - ..Default::default() - }], - vec![], - ); - - cx.update_editor(|editor, window, cx| { - editor.next_edit_prediction(&Default::default(), window, cx) - }); - executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - cx.update_editor(|editor, window, cx| { - assert!(editor.has_active_edit_prediction()); - assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); - assert_eq!(editor.text(cx), "fn foo() {\n \n}"); - - // Tabbing inside of leading whitespace inserts indentation without accepting the suggestion. - editor.tab(&Default::default(), window, cx); - assert!(editor.has_active_edit_prediction()); - assert_eq!(editor.text(cx), "fn foo() {\n \n}"); - assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); - - // Using AcceptEditPrediction again accepts the suggestion. - editor.accept_edit_prediction(&Default::default(), window, cx); - assert!(!editor.has_active_edit_prediction()); - assert_eq!(editor.text(cx), "fn foo() {\n let x = 4;\n}"); - assert_eq!(editor.display_text(cx), "fn foo() {\n let x = 4;\n}"); - }); } #[gpui::test(iterations = 10)] @@ -546,7 +430,7 @@ mod tests { cx, ) .await; - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); cx.update_editor(|editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); @@ -569,25 +453,30 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "one.copilot1".into(), range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { assert!(editor.has_active_edit_prediction()); // Accepting the first word of the suggestion should only accept the first word and still show the rest. - editor.accept_partial_edit_prediction(&Default::default(), window, cx); + editor.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); + assert!(editor.has_active_edit_prediction()); assert_eq!(editor.text(cx), "one.copilot\ntwo\nthree\n"); assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); // Accepting next word should accept the non-word and copilot suggestion should be gone - editor.accept_partial_edit_prediction(&Default::default(), window, cx); + editor.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); + assert!(!editor.has_active_edit_prediction()); assert_eq!(editor.text(cx), "one.copilot1\ntwo\nthree\n"); assert_eq!(editor.display_text(cx), "one.copilot1\ntwo\nthree\n"); @@ -611,19 +500,22 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "one.123. 
copilot\n 456".into(), range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { assert!(editor.has_active_edit_prediction()); // Accepting the first word (non-word) of the suggestion should only accept the first word and still show the rest. - editor.accept_partial_edit_prediction(&Default::default(), window, cx); + editor.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); assert!(editor.has_active_edit_prediction()); assert_eq!(editor.text(cx), "one.123. \ntwo\nthree\n"); assert_eq!( @@ -632,7 +524,7 @@ mod tests { ); // Accepting next word should accept the next word and copilot suggestion should still exist - editor.accept_partial_edit_prediction(&Default::default(), window, cx); + editor.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); assert!(editor.has_active_edit_prediction()); assert_eq!(editor.text(cx), "one.123. copilot\ntwo\nthree\n"); assert_eq!( @@ -641,7 +533,7 @@ mod tests { ); // Accepting the whitespace should accept the non-word/whitespaces with newline and copilot suggestion should be gone - editor.accept_partial_edit_prediction(&Default::default(), window, cx); + editor.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); assert!(!editor.has_active_edit_prediction()); assert_eq!(editor.text(cx), "one.123. copilot\n 456\ntwo\nthree\n"); assert_eq!( @@ -670,7 +562,7 @@ mod tests { cx, ) .await; - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); cx.update_editor(|editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); @@ -683,15 +575,18 @@ mod tests { handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "two.foo()".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); cx.update_editor(|editor, window, cx| { - editor.next_edit_prediction(&Default::default(), window, cx) + editor.show_edit_prediction(&Default::default(), window, cx) }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, window, cx| { @@ -700,15 +595,22 @@ mod tests { assert_eq!(editor.text(cx), "one\ntw\nthree\n"); editor.backspace(&Default::default(), window, cx); + }); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { assert!(editor.has_active_edit_prediction()); assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); assert_eq!(editor.text(cx), "one\nt\nthree\n"); editor.backspace(&Default::default(), window, cx); + }); + executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); + cx.run_until_parked(); + cx.update_editor(|editor, window, cx| { assert!(editor.has_active_edit_prediction()); assert_eq!(editor.display_text(cx), "one\ntwo.foo()\nthree\n"); assert_eq!(editor.text(cx), "one\n\nthree\n"); - // Deleting across the original suggestion range invalidates it. 
editor.backspace(&Default::default(), window, cx); assert!(!editor.has_active_edit_prediction()); @@ -750,10 +652,10 @@ mod tests { editor .update(cx, |editor, window, cx| { use gpui::Focusable; - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); }) .unwrap(); - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); editor .update(cx, |editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) @@ -762,19 +664,22 @@ mod tests { handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "b = 2 + a".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 5)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); _ = editor.update(cx, |editor, window, cx| { // Ensure copilot suggestions are shown for the first excerpt. editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([Point::new(1, 5)..Point::new(1, 5)]) }); - editor.next_edit_prediction(&Default::default(), window, cx); + editor.show_edit_prediction(&Default::default(), window, cx); }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); _ = editor.update(cx, |editor, _, cx| { @@ -788,12 +693,15 @@ mod tests { handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "d = 4 + c".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 6)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); _ = editor.update(cx, |editor, window, cx| { // Move to another excerpt, ensuring the suggestion gets cleared. 
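// Hedged aside (not part of the diff): a minimal, self-contained sketch of the
// prefix/suffix trimming idea behind the `trim_completion` / `common_prefix`
// helpers introduced earlier in copilot_edit_prediction_delegate.rs. It works on
// plain &str instead of buffer snapshots, so the names below are illustrative
// only; the real code operates on byte offsets within a `Buffer` and re-anchors
// the trimmed range afterwards.
fn common_prefix_len(a: impl Iterator<Item = char>, b: impl Iterator<Item = char>) -> usize {
    a.zip(b)
        .take_while(|(a, b)| a == b)
        .map(|(a, _)| a.len_utf8())
        .sum()
}

// Drop the parts of `suggestion` that already match the buffer text covered by
// the suggested range, returning only the text worth showing as a prediction.
// e.g. trim_suggestion("one.", "one.copilot1") == Some("copilot1")
fn trim_suggestion<'a>(existing: &str, suggestion: &'a str) -> Option<&'a str> {
    let prefix_len = common_prefix_len(existing.chars(), suggestion.chars());
    let suffix_len = common_prefix_len(
        existing[prefix_len..].chars().rev(),
        suggestion[prefix_len..].chars().rev(),
    );
    let trimmed = &suggestion[prefix_len..suggestion.len() - suffix_len];
    (!trimmed.trim().is_empty()).then_some(trimmed)
}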
@@ -848,7 +756,7 @@ mod tests { cx, ) .await; - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); cx.update_editor(|editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) }); @@ -870,15 +778,18 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "two.foo()".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 2)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); cx.update_editor(|editor, window, cx| { - editor.next_edit_prediction(&Default::default(), window, cx) + editor.show_edit_prediction(&Default::default(), window, cx) }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, _, cx| { @@ -900,12 +811,15 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "two.foo()".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 3)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, _, cx| { @@ -927,12 +841,15 @@ mod tests { )); handle_copilot_completion_request( &copilot_lsp, - vec![crate::request::Completion { + vec![crate::request::NextEditSuggestion { text: "two.foo()".into(), range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 4)), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], - vec![], ); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); cx.update_editor(|editor, _, cx| { @@ -997,10 +914,10 @@ mod tests { editor .update(cx, |editor, window, cx| { use gpui::Focusable; - window.focus(&editor.focus_handle(cx)) + window.focus(&editor.focus_handle(cx), cx) }) .unwrap(); - let copilot_provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); editor .update(cx, |editor, window, cx| { editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) @@ -1008,16 +925,20 @@ mod tests { .unwrap(); let mut copilot_requests = copilot_lsp - .set_request_handler::( + .set_request_handler::( move |_params, _cx| async move { - Ok(crate::request::GetCompletionsResult { - completions: vec![crate::request::Completion { + Ok(crate::request::NextEditSuggestionsResult { + edits: vec![crate::request::NextEditSuggestion { text: "next line".into(), range: lsp::Range::new( lsp::Position::new(1, 0), lsp::Position::new(1, 0), ), - ..Default::default() + command: None, + text_document: lsp::VersionedTextDocumentIdentifier { + uri: Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(), + version: 0, + }, }], }) }, @@ -1046,23 +967,14 @@ mod tests { fn handle_copilot_completion_request( lsp: &lsp::FakeLanguageServer, - completions: Vec, - completions_cycling: Vec, + completions: Vec, ) { - lsp.set_request_handler::(move |_params, _cx| { - let completions = completions.clone(); - async move 
{ - Ok(crate::request::GetCompletionsResult { - completions: completions.clone(), - }) - } - }); - lsp.set_request_handler::( + lsp.set_request_handler::( move |_params, _cx| { - let completions_cycling = completions_cycling.clone(); + let completions = completions.clone(); async move { - Ok(crate::request::GetCompletionsResult { - completions: completions_cycling.clone(), + Ok(crate::request::NextEditSuggestionsResult { + edits: completions.clone(), }) } }, @@ -1081,8 +993,9 @@ mod tests { vec![complete_from_marker, replace_range_marker.clone()], ); + let range = marked_ranges.remove(&replace_range_marker).unwrap()[0].clone(); let replace_range = - cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); + cx.to_lsp_range(MultiBufferOffset(range.start)..MultiBufferOffset(range.end)); let mut request = cx.set_request_handler::(move |url, params, _| { diff --git a/crates/copilot/src/copilot_responses.rs b/crates/copilot/src/copilot_responses.rs index c1e066208823dcab34a32096cfa447dd0ec9592f..2da2eb394b5fc5ba88c8dd3007df394a2dbc15bf 100644 --- a/crates/copilot/src/copilot_responses.rs +++ b/crates/copilot/src/copilot_responses.rs @@ -127,6 +127,8 @@ pub enum ResponseInputItem { arguments: String, #[serde(skip_serializing_if = "Option::is_none")] status: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + thought_signature: Option, }, FunctionCallOutput { call_id: String, @@ -251,6 +253,8 @@ pub enum ResponseOutputItem { arguments: String, #[serde(skip_serializing_if = "Option::is_none")] status: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + thought_signature: Option, }, Reasoning { id: String, @@ -309,7 +313,8 @@ pub async fn stream_response( }; let is_streaming = request.stream; - let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; + let json = serde_json::to_string(&request)?; + let request = request_builder.body(AsyncBody::from(json))?; let mut response = client.send(request).await?; if !response.status().is_success() { diff --git a/crates/copilot/src/request.rs b/crates/copilot/src/request.rs index 85d6254dc060824a9b2686e8f53090fccb39980e..2f97fb72a42904b1fefdd3999f680fca12559ecd 100644 --- a/crates/copilot/src/request.rs +++ b/crates/copilot/src/request.rs @@ -1,3 +1,4 @@ +use lsp::VersionedTextDocumentIdentifier; use serde::{Deserialize, Serialize}; pub enum CheckStatus {} @@ -88,72 +89,6 @@ impl lsp::request::Request for SignOut { const METHOD: &'static str = "signOut"; } -pub enum GetCompletions {} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetCompletionsParams { - pub doc: GetCompletionsDocument, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetCompletionsDocument { - pub tab_size: u32, - pub indent_size: u32, - pub insert_spaces: bool, - pub uri: lsp::Uri, - pub relative_path: String, - pub position: lsp::Position, - pub version: usize, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetCompletionsResult { - pub completions: Vec, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Completion { - pub text: String, - pub position: lsp::Position, - pub uuid: String, - pub range: lsp::Range, - pub display_text: String, -} - -impl lsp::request::Request for GetCompletions { - type Params = GetCompletionsParams; - type Result = GetCompletionsResult; - const METHOD: &'static str = 
"getCompletions"; -} - -pub enum GetCompletionsCycling {} - -impl lsp::request::Request for GetCompletionsCycling { - type Params = GetCompletionsParams; - type Result = GetCompletionsResult; - const METHOD: &'static str = "getCompletionsCycling"; -} - -pub enum LogMessage {} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LogMessageParams { - pub level: u8, - pub message: String, - pub metadata_str: String, - pub extra: Vec, -} - -impl lsp::notification::Notification for LogMessage { - type Params = LogMessageParams; - const METHOD: &'static str = "LogMessage"; -} - pub enum StatusNotification {} #[derive(Debug, Serialize, Deserialize)] @@ -223,3 +158,36 @@ impl lsp::request::Request for NotifyRejected { type Result = String; const METHOD: &'static str = "notifyRejected"; } + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NextEditSuggestions; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NextEditSuggestionsParams { + pub(crate) text_document: VersionedTextDocumentIdentifier, + pub(crate) position: lsp::Position, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NextEditSuggestion { + pub text: String, + pub text_document: VersionedTextDocumentIdentifier, + pub range: lsp::Range, + pub command: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NextEditSuggestionsResult { + pub edits: Vec, +} + +impl lsp::request::Request for NextEditSuggestions { + type Params = NextEditSuggestionsParams; + type Result = NextEditSuggestionsResult; + + const METHOD: &'static str = "textDocument/copilotInlineEdit"; +} diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index 464a114d4ea11bca5597a6a91fd831ade050baaa..4f71a34408e23f099d4d3c145d86af24e607e3c3 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -1,166 +1,159 @@ use crate::{Copilot, Status, request::PromptUserDeviceFlow}; +use anyhow::Context as _; use gpui::{ - Animation, AnimationExt, App, ClipboardItem, Context, DismissEvent, Element, Entity, - EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, MouseDownEvent, - ParentElement, Render, Styled, Subscription, Transformation, Window, div, percentage, svg, + App, ClipboardItem, Context, DismissEvent, Element, Entity, EventEmitter, FocusHandle, + Focusable, InteractiveElement, IntoElement, MouseDownEvent, ParentElement, Render, Styled, + Subscription, Window, WindowBounds, WindowOptions, div, point, }; -use std::time::Duration; -use ui::{Button, Label, Vector, VectorName, prelude::*}; +use ui::{ButtonLike, CommonAnimationExt, ConfiguredApiCard, Vector, VectorName, prelude::*}; +use url::Url; use util::ResultExt as _; -use workspace::notifications::NotificationId; -use workspace::{ModalView, Toast, Workspace}; +use workspace::{Toast, Workspace, notifications::NotificationId}; const COPILOT_SIGN_UP_URL: &str = "https://github.com/features/copilot"; +const ERROR_LABEL: &str = + "Copilot had issues starting. 
You can try reinstalling it and signing in again."; struct CopilotStatusToast; pub fn initiate_sign_in(window: &mut Window, cx: &mut App) { + let is_reinstall = false; + initiate_sign_in_impl(is_reinstall, window, cx) +} + +pub fn initiate_sign_out(window: &mut Window, cx: &mut App) { let Some(copilot) = Copilot::global(cx) else { return; }; - let Some(workspace) = window.root::().flatten() else { - return; - }; - workspace.update(cx, |workspace, cx| { - let is_reinstall = false; - initiate_sign_in_within_workspace(workspace, copilot, is_reinstall, window, cx) - }); + + copilot_toast(Some("Signing out of Copilot…"), window, cx); + + let sign_out_task = copilot.update(cx, |copilot, cx| copilot.sign_out(cx)); + window + .spawn(cx, async move |cx| match sign_out_task.await { + Ok(()) => { + cx.update(|window, cx| copilot_toast(Some("Signed out of Copilot"), window, cx)) + } + Err(err) => cx.update(|window, cx| { + if let Some(workspace) = window.root::().flatten() { + workspace.update(cx, |workspace, cx| { + workspace.show_error(&err, cx); + }) + } else { + log::error!("{:?}", err); + } + }), + }) + .detach(); } pub fn reinstall_and_sign_in(window: &mut Window, cx: &mut App) { let Some(copilot) = Copilot::global(cx) else { return; }; + let _ = copilot.update(cx, |copilot, cx| copilot.reinstall(cx)); + let is_reinstall = true; + initiate_sign_in_impl(is_reinstall, window, cx); +} + +fn open_copilot_code_verification_window(copilot: &Entity, window: &Window, cx: &mut App) { + let current_window_center = window.bounds().center(); + let height = px(450.); + let width = px(350.); + let window_bounds = WindowBounds::Windowed(gpui::bounds( + current_window_center - point(height / 2.0, width / 2.0), + gpui::size(height, width), + )); + cx.open_window( + WindowOptions { + kind: gpui::WindowKind::PopUp, + window_bounds: Some(window_bounds), + is_resizable: false, + is_movable: true, + titlebar: Some(gpui::TitlebarOptions { + appears_transparent: true, + ..Default::default() + }), + ..Default::default() + }, + |window, cx| cx.new(|cx| CopilotCodeVerification::new(&copilot, window, cx)), + ) + .context("Failed to open Copilot code verification window") + .log_err(); +} + +fn copilot_toast(message: Option<&'static str>, window: &Window, cx: &mut App) { + const NOTIFICATION_ID: NotificationId = NotificationId::unique::(); + let Some(workspace) = window.root::().flatten() else { return; }; - workspace.update(cx, |workspace, cx| { - reinstall_and_sign_in_within_workspace(workspace, copilot, window, cx); - }); -} -pub fn reinstall_and_sign_in_within_workspace( - workspace: &mut Workspace, - copilot: Entity, - window: &mut Window, - cx: &mut Context, -) { - let _ = copilot.update(cx, |copilot, cx| copilot.reinstall(cx)); - let is_reinstall = true; - initiate_sign_in_within_workspace(workspace, copilot, is_reinstall, window, cx); + workspace.update(cx, |workspace, cx| match message { + Some(message) => workspace.show_toast(Toast::new(NOTIFICATION_ID, message), cx), + None => workspace.dismiss_toast(&NOTIFICATION_ID, cx), + }); } -pub fn initiate_sign_in_within_workspace( - workspace: &mut Workspace, - copilot: Entity, - is_reinstall: bool, - window: &mut Window, - cx: &mut Context, -) { +pub fn initiate_sign_in_impl(is_reinstall: bool, window: &mut Window, cx: &mut App) { + let Some(copilot) = Copilot::global(cx) else { + return; + }; if matches!(copilot.read(cx).status(), Status::Disabled) { copilot.update(cx, |copilot, cx| copilot.start_copilot(false, true, cx)); } match copilot.read(cx).status() { 
Status::Starting { task } => { - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - if is_reinstall { - "Copilot is reinstalling..." - } else { - "Copilot is starting..." - }, - ), + copilot_toast( + Some(if is_reinstall { + "Copilot is reinstalling…" + } else { + "Copilot is starting…" + }), + window, cx, ); - cx.spawn_in(window, async move |workspace, cx| { - task.await; - if let Some(copilot) = cx.update(|_window, cx| Copilot::global(cx)).ok().flatten() { - workspace - .update_in(cx, |workspace, window, cx| { - match copilot.read(cx).status() { - Status::Authorized => workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "Copilot has started.", - ), - cx, - ), - _ => { - workspace.dismiss_toast( - &NotificationId::unique::(), - cx, - ); - copilot - .update(cx, |copilot, cx| copilot.sign_in(cx)) - .detach_and_log_err(cx); - workspace.toggle_modal(window, cx, |_, cx| { - CopilotCodeVerification::new(&copilot, cx) - }); - } + window + .spawn(cx, async move |cx| { + task.await; + cx.update(|window, cx| { + let Some(copilot) = Copilot::global(cx) else { + return; + }; + match copilot.read(cx).status() { + Status::Authorized => { + copilot_toast(Some("Copilot has started."), window, cx) } - }) - .log_err(); - } - }) - .detach(); + _ => { + copilot_toast(None, window, cx); + copilot + .update(cx, |copilot, cx| copilot.sign_in(cx)) + .detach_and_log_err(cx); + open_copilot_code_verification_window(&copilot, window, cx); + } + } + }) + .log_err(); + }) + .detach(); } _ => { copilot .update(cx, |copilot, cx| copilot.sign_in(cx)) .detach(); - workspace.toggle_modal(window, cx, |_, cx| { - CopilotCodeVerification::new(&copilot, cx) - }); + open_copilot_code_verification_window(&copilot, window, cx); } } } -pub fn sign_out_within_workspace( - workspace: &mut Workspace, - copilot: Entity, - cx: &mut Context, -) { - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "Signing out of Copilot...", - ), - cx, - ); - let sign_out_task = copilot.update(cx, |copilot, cx| copilot.sign_out(cx)); - cx.spawn(async move |workspace, cx| match sign_out_task.await { - Ok(()) => { - workspace - .update(cx, |workspace, cx| { - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "Signed out of Copilot.", - ), - cx, - ) - }) - .ok(); - } - Err(err) => { - workspace - .update(cx, |workspace, cx| { - workspace.show_error(&err, cx); - }) - .ok(); - } - }) - .detach(); -} - pub struct CopilotCodeVerification { status: Status, connect_clicked: bool, focus_handle: FocusHandle, copilot: Entity, _subscription: Subscription, + sign_up_url: Option, } impl Focusable for CopilotCodeVerification { @@ -170,29 +163,44 @@ impl Focusable for CopilotCodeVerification { } impl EventEmitter for CopilotCodeVerification {} -impl ModalView for CopilotCodeVerification { - fn on_before_dismiss( - &mut self, - _: &mut Window, - cx: &mut Context, - ) -> workspace::DismissDecision { - self.copilot.update(cx, |copilot, cx| { - if matches!(copilot.status(), Status::SigningIn { .. 
}) { - copilot.sign_out(cx).detach_and_log_err(cx); + +impl CopilotCodeVerification { + pub fn new(copilot: &Entity, window: &mut Window, cx: &mut Context) -> Self { + window.on_window_should_close(cx, |window, cx| { + if let Some(this) = window.root::().flatten() { + this.update(cx, |this, cx| { + this.before_dismiss(cx); + }); } + true }); - workspace::DismissDecision::Dismiss(true) - } -} + cx.subscribe_in( + &cx.entity(), + window, + |this, _, _: &DismissEvent, window, cx| { + window.remove_window(); + this.before_dismiss(cx); + }, + ) + .detach(); -impl CopilotCodeVerification { - pub fn new(copilot: &Entity, cx: &mut Context) -> Self { let status = copilot.read(cx).status(); + // Determine sign-up URL based on verification_uri domain if available + let sign_up_url = if let Status::SigningIn { + prompt: Some(ref prompt), + } = status + { + // Extract domain from verification_uri to construct sign-up URL + Self::get_sign_up_url_from_verification(&prompt.verification_uri) + } else { + None + }; Self { status, connect_clicked: false, focus_handle: cx.focus_handle(), copilot: copilot.clone(), + sign_up_url, _subscription: cx.observe(copilot, |this, copilot, cx| { let status = copilot.read(cx).status(); match status { @@ -206,54 +214,74 @@ impl CopilotCodeVerification { } pub fn set_status(&mut self, status: Status, cx: &mut Context) { + // Update sign-up URL if we have a new verification URI + if let Status::SigningIn { + prompt: Some(ref prompt), + } = status + { + self.sign_up_url = Self::get_sign_up_url_from_verification(&prompt.verification_uri); + } self.status = status; cx.notify(); } + fn get_sign_up_url_from_verification(verification_uri: &str) -> Option { + // Extract domain from verification URI using url crate + if let Ok(url) = Url::parse(verification_uri) + && let Some(host) = url.host_str() + && !host.contains("github.com") + { + // For GHE, construct URL from domain + Some(format!("https://{}/features/copilot", host)) + } else { + None + } + } + fn render_device_code(data: &PromptUserDeviceFlow, cx: &mut Context) -> impl IntoElement { let copied = cx .read_from_clipboard() .map(|item| item.text().as_ref() == Some(&data.user_code)) .unwrap_or(false); - h_flex() - .w_full() - .p_1() - .border_1() - .border_muted(cx) - .rounded_sm() - .cursor_pointer() - .justify_between() - .on_mouse_down(gpui::MouseButton::Left, { + + ButtonLike::new("copy-button") + .full_width() + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .size(ButtonSize::Medium) + .child( + h_flex() + .w_full() + .p_1() + .justify_between() + .child(Label::new(data.user_code.clone())) + .child(Label::new(if copied { "Copied!" } else { "Copy" })), + ) + .on_click({ let user_code = data.user_code.clone(); move |_, window, cx| { cx.write_to_clipboard(ClipboardItem::new_string(user_code.clone())); window.refresh(); } }) - .child(div().flex_1().child(Label::new(data.user_code.clone()))) - .child(div().flex_none().px_1().child(Label::new(if copied { - "Copied!" - } else { - "Copy" - }))) } fn render_prompting_modal( connect_clicked: bool, data: &PromptUserDeviceFlow, - cx: &mut Context, ) -> impl Element { let connect_button_label = if connect_clicked { - "Waiting for connection..." 
+ "Waiting for connection…" } else { "Connect to GitHub" }; + v_flex() .flex_1() - .gap_2() + .gap_2p5() .items_center() - .child(Headline::new("Use GitHub Copilot in Zed.").size(HeadlineSize::Large)) + .text_center() + .child(Headline::new("Use GitHub Copilot in Zed").size(HeadlineSize::Large)) .child( Label::new("Using Copilot requires an active subscription on GitHub.") .color(Color::Muted), @@ -261,110 +289,154 @@ impl CopilotCodeVerification { .child(Self::render_device_code(data, cx)) .child( Label::new("Paste this code into GitHub after clicking the button below.") - .size(ui::LabelSize::Small), - ) - .child( - Button::new("connect-button", connect_button_label) - .on_click({ - let verification_uri = data.verification_uri.clone(); - cx.listener(move |this, _, _window, cx| { - cx.open_url(&verification_uri); - this.connect_clicked = true; - }) - }) - .full_width() - .style(ButtonStyle::Filled), + .color(Color::Muted), ) .child( - Button::new("copilot-enable-cancel-button", "Cancel") - .full_width() - .on_click(cx.listener(|_, _, _, cx| { - cx.emit(DismissEvent); - })), + v_flex() + .w_full() + .gap_1() + .child( + Button::new("connect-button", connect_button_label) + .full_width() + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) + .on_click({ + let verification_uri = data.verification_uri.clone(); + cx.listener(move |this, _, _window, cx| { + cx.open_url(&verification_uri); + this.connect_clicked = true; + }) + }), + ) + .child( + Button::new("copilot-enable-cancel-button", "Cancel") + .full_width() + .size(ButtonSize::Medium) + .on_click(cx.listener(|_, _, _, cx| { + cx.emit(DismissEvent); + })), + ), ) } fn render_enabled_modal(cx: &mut Context) -> impl Element { v_flex() .gap_2() + .text_center() + .justify_center() .child(Headline::new("Copilot Enabled!").size(HeadlineSize::Large)) - .child(Label::new( - "You can update your settings or sign out from the Copilot menu in the status bar.", - )) + .child(Label::new("You're all set to use GitHub Copilot.").color(Color::Muted)) .child( Button::new("copilot-enabled-done-button", "Done") .full_width() + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) .on_click(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))), ) } - fn render_unauthorized_modal(cx: &mut Context) -> impl Element { - v_flex() - .child(Headline::new("You must have an active GitHub Copilot subscription.").size(HeadlineSize::Large)) + fn render_unauthorized_modal(&self, cx: &mut Context) -> impl Element { + let sign_up_url = self + .sign_up_url + .as_deref() + .unwrap_or(COPILOT_SIGN_UP_URL) + .to_owned(); + let description = "Enable Copilot by connecting your existing license once you have subscribed or renewed your subscription."; - .child(Label::new( - "You can enable Copilot by connecting your existing license once you have subscribed or renewed your subscription.", - ).color(Color::Warning)) + v_flex() + .gap_2() + .text_center() + .justify_center() + .child( + Headline::new("You must have an active GitHub Copilot subscription.") + .size(HeadlineSize::Large), + ) + .child(Label::new(description).color(Color::Warning)) .child( Button::new("copilot-subscribe-button", "Subscribe on GitHub") .full_width() - .on_click(|_, _, cx| cx.open_url(COPILOT_SIGN_UP_URL)), + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) + .on_click(move |_, _, cx| cx.open_url(&sign_up_url)), ) .child( Button::new("copilot-subscribe-cancel-button", "Cancel") .full_width() + .size(ButtonSize::Medium) .on_click(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))), ) } - fn 
render_loading(window: &mut Window, _: &mut Context) -> impl Element { - let loading_icon = svg() - .size_8() - .path(IconName::ArrowCircle.path()) - .text_color(window.text_style().color) - .with_animation( - "icon_circle_arrow", - Animation::new(Duration::from_secs(2)).repeat(), - |svg, delta| svg.with_transformation(Transformation::rotate(percentage(delta))), - ); + fn render_error_modal(_cx: &mut Context) -> impl Element { + v_flex() + .gap_2() + .text_center() + .justify_center() + .child(Headline::new("An Error Happened").size(HeadlineSize::Large)) + .child(Label::new(ERROR_LABEL).color(Color::Muted)) + .child( + Button::new("copilot-subscribe-button", "Reinstall Copilot and Sign In") + .full_width() + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) + .icon(IconName::Download) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .on_click(|_, window, cx| reinstall_and_sign_in(window, cx)), + ) + } - h_flex().justify_center().child(loading_icon) + fn before_dismiss( + &mut self, + cx: &mut Context<'_, CopilotCodeVerification>, + ) -> workspace::DismissDecision { + self.copilot.update(cx, |copilot, cx| { + if matches!(copilot.status(), Status::SigningIn { .. }) { + copilot.sign_out(cx).detach_and_log_err(cx); + } + }); + workspace::DismissDecision::Dismiss(true) } } impl Render for CopilotCodeVerification { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let prompt = match &self.status { - Status::SigningIn { prompt: None } => { - Self::render_loading(window, cx).into_any_element() - } + Status::SigningIn { prompt: None } => Icon::new(IconName::ArrowCircle) + .color(Color::Muted) + .with_rotate_animation(2) + .into_any_element(), Status::SigningIn { prompt: Some(prompt), } => Self::render_prompting_modal(self.connect_clicked, prompt, cx).into_any_element(), Status::Unauthorized => { self.connect_clicked = false; - Self::render_unauthorized_modal(cx).into_any_element() + self.render_unauthorized_modal(cx).into_any_element() } Status::Authorized => { self.connect_clicked = false; Self::render_enabled_modal(cx).into_any_element() } + Status::Error(..) 
=> Self::render_error_modal(cx).into_any_element(), _ => div().into_any_element(), }; v_flex() - .id("copilot code verification") + .id("copilot_code_verification") .track_focus(&self.focus_handle(cx)) - .elevation_3(cx) - .w_96() - .items_center() - .p_4() + .size_full() + .px_4() + .py_8() .gap_2() + .items_center() + .justify_center() + .elevation_3(cx) .on_action(cx.listener(|_, _: &menu::Cancel, _, cx| { cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _| { - window.focus(&this.focus_handle); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + window.focus(&this.focus_handle, cx); })) .child( Vector::new(VectorName::ZedXCopilot, rems(8.), rems(4.)) @@ -373,3 +445,243 @@ impl Render for CopilotCodeVerification { .child(prompt) } } + +pub struct ConfigurationView { + copilot_status: Option, + is_authenticated: fn(cx: &App) -> bool, + edit_prediction: bool, + _subscription: Option, +} + +pub enum ConfigurationMode { + Chat, + EditPrediction, +} + +impl ConfigurationView { + pub fn new( + is_authenticated: fn(cx: &App) -> bool, + mode: ConfigurationMode, + cx: &mut Context, + ) -> Self { + let copilot = Copilot::global(cx); + + Self { + copilot_status: copilot.as_ref().map(|copilot| copilot.read(cx).status()), + is_authenticated, + edit_prediction: matches!(mode, ConfigurationMode::EditPrediction), + _subscription: copilot.as_ref().map(|copilot| { + cx.observe(copilot, |this, model, cx| { + this.copilot_status = Some(model.read(cx).status()); + cx.notify(); + }) + }), + } + } +} + +impl ConfigurationView { + fn is_starting(&self) -> bool { + matches!(&self.copilot_status, Some(Status::Starting { .. })) + } + + fn is_signing_in(&self) -> bool { + matches!( + &self.copilot_status, + Some(Status::SigningIn { .. 
}) + | Some(Status::SignedOut { + awaiting_signing_in: true + }) + ) + } + + fn is_error(&self) -> bool { + matches!(&self.copilot_status, Some(Status::Error(_))) + } + + fn has_no_status(&self) -> bool { + self.copilot_status.is_none() + } + + fn loading_message(&self) -> Option { + if self.is_starting() { + Some("Starting Copilot…".into()) + } else if self.is_signing_in() { + Some("Signing into Copilot…".into()) + } else { + None + } + } + + fn render_loading_button( + &self, + label: impl Into, + edit_prediction: bool, + ) -> impl IntoElement { + ButtonLike::new("loading_button") + .disabled(true) + .style(ButtonStyle::Outlined) + .when(edit_prediction, |this| this.size(ButtonSize::Medium)) + .child( + h_flex() + .w_full() + .gap_1() + .justify_center() + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_rotate_animation(4), + ) + .child(Label::new(label)), + ) + } + + fn render_sign_in_button(&self, edit_prediction: bool) -> impl IntoElement { + let label = if edit_prediction { + "Sign in to GitHub" + } else { + "Sign in to use GitHub Copilot" + }; + + Button::new("sign_in", label) + .map(|this| { + if edit_prediction { + this.size(ButtonSize::Medium) + } else { + this.full_width() + } + }) + .style(ButtonStyle::Outlined) + .icon(IconName::Github) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .on_click(|_, window, cx| initiate_sign_in(window, cx)) + } + + fn render_reinstall_button(&self, edit_prediction: bool) -> impl IntoElement { + let label = if edit_prediction { + "Reinstall and Sign in" + } else { + "Reinstall Copilot and Sign in" + }; + + Button::new("reinstall_and_sign_in", label) + .map(|this| { + if edit_prediction { + this.size(ButtonSize::Medium) + } else { + this.full_width() + } + }) + .style(ButtonStyle::Outlined) + .icon(IconName::Download) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .on_click(|_, window, cx| reinstall_and_sign_in(window, cx)) + } + + fn render_for_edit_prediction(&self) -> impl IntoElement { + let container = |description: SharedString, action: AnyElement| { + h_flex() + .pt_2p5() + .w_full() + .justify_between() + .child( + v_flex() + .w_full() + .max_w_1_2() + .child(Label::new("Authenticate To Use")) + .child( + Label::new(description) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ) + .child(action) + }; + + let start_label = "To use Copilot for edit predictions, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot subscription.".into(); + let no_status_label = "Copilot requires an active GitHub Copilot subscription. Please ensure Copilot is configured and try again, or use a different edit predictions provider.".into(); + + if let Some(msg) = self.loading_message() { + container( + start_label, + self.render_loading_button(msg, true).into_any_element(), + ) + .into_any_element() + } else if self.is_error() { + container( + ERROR_LABEL.into(), + self.render_reinstall_button(true).into_any_element(), + ) + .into_any_element() + } else if self.has_no_status() { + container( + no_status_label, + self.render_sign_in_button(true).into_any_element(), + ) + .into_any_element() + } else { + container( + start_label, + self.render_sign_in_button(true).into_any_element(), + ) + .into_any_element() + } + } + + fn render_for_chat(&self) -> impl IntoElement { + let start_label = "To use Zed's agent with GitHub Copilot, you need to be logged in to GitHub. 
Note that your GitHub account must have an active Copilot Chat subscription."; + let no_status_label = "Copilot Chat requires an active GitHub Copilot subscription. Please ensure Copilot is configured and try again, or use a different LLM provider."; + + if let Some(msg) = self.loading_message() { + v_flex() + .gap_2() + .child(Label::new(start_label)) + .child(self.render_loading_button(msg, false)) + .into_any_element() + } else if self.is_error() { + v_flex() + .gap_2() + .child(Label::new(ERROR_LABEL)) + .child(self.render_reinstall_button(false)) + .into_any_element() + } else if self.has_no_status() { + v_flex() + .gap_2() + .child(Label::new(no_status_label)) + .child(self.render_sign_in_button(false)) + .into_any_element() + } else { + v_flex() + .gap_2() + .child(Label::new(start_label)) + .child(self.render_sign_in_button(false)) + .into_any_element() + } + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_authenticated = self.is_authenticated; + + if is_authenticated(cx) { + return ConfiguredApiCard::new("Authorized") + .button_label("Sign Out") + .on_click(|_, window, cx| { + initiate_sign_out(window, cx); + }) + .into_any_element(); + } + + if self.edit_prediction { + self.render_for_edit_prediction().into_any_element() + } else { + self.render_for_chat().into_any_element() + } + } +} diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 3f85039e9ea3bce8e702991461adec4a931d3e4a..bd1c1121848e34349b5cd58c0fa033d380fa791b 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -23,6 +23,9 @@ zstd.workspace = true [target.'cfg(target_os = "macos")'.dependencies] mach2.workspace = true +[target.'cfg(target_os = "windows")'.dependencies] +windows.workspace = true + [lints] workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 560ca5a009d5ddf8f3866591ebd9e6247bc98942..4c601c393004beca1d5e550e1eeae7f126751448 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -3,6 +3,8 @@ use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; + +#[cfg(not(target_os = "windows"))] use smol::process::Command; #[cfg(target_os = "macos")] @@ -51,11 +53,13 @@ pub async fn init(crash_init: InitCrashHandler) { unsafe { env::set_var("RUST_BACKTRACE", "1") }; old_hook(info); // prevent the macOS crash dialog from popping up - std::process::exit(1); + if cfg!(target_os = "macos") { + std::process::exit(1); + } })); return; } - (Some(true), _) | (None, _) => { + _ => { panic::set_hook(Box::new(panic_hook)); } } @@ -68,11 +72,16 @@ pub async fn init(crash_init: InitCrashHandler) { // used by the crash handler isn't destroyed correctly which causes it to stay on the file // system and block further attempts to initialize crash handlers with that socket path. 
let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}")); + #[cfg(not(target_os = "windows"))] let _crash_handler = Command::new(exe) .arg("--crash-handler") .arg(&socket_name) .spawn() .expect("unable to spawn server process"); + + #[cfg(target_os = "windows")] + spawn_crash_handler_windows(&exe, &socket_name); + #[cfg(target_os = "linux")] let server_pid = _crash_handler.id(); info!("spawning crash handler process"); @@ -289,26 +298,29 @@ impl minidumper::ServerHandler for CrashServer { pub fn panic_hook(info: &PanicHookInfo) { // Don't handle a panic on threads that are not relevant to the main execution. if extension_host::wasm_host::IS_WASM_THREAD.with(|v| v.load(Ordering::Acquire)) { + log::error!("wasm thread panicked!"); return; } - let message = info - .payload() - .downcast_ref::<&str>() - .map(|s| s.to_string()) - .or_else(|| info.payload().downcast_ref::().cloned()) - .unwrap_or_else(|| "Box".to_string()); + let message = info.payload_as_str().unwrap_or("Box").to_owned(); let span = info .location() .map(|loc| format!("{}:{}", loc.file(), loc.line())) .unwrap_or_default(); + let current_thread = std::thread::current(); + let thread_name = current_thread.name().unwrap_or(""); + // wait 500ms for the crash handler process to start up // if it's still not there just write panic info and no minidump let retry_frequency = Duration::from_millis(100); for _ in 0..5 { if let Some(client) = CRASH_HANDLER.get() { + let location = info + .location() + .map_or_else(|| "".to_owned(), |location| location.to_string()); + log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); client .send_message( 2, @@ -337,6 +349,57 @@ pub fn panic_hook(info: &PanicHookInfo) { } } +#[cfg(target_os = "windows")] +fn spawn_crash_handler_windows(exe: &Path, socket_name: &Path) { + use std::ffi::OsStr; + use std::iter::once; + use std::os::windows::ffi::OsStrExt; + use windows::Win32::System::Threading::{ + CreateProcessW, PROCESS_CREATION_FLAGS, PROCESS_INFORMATION, STARTF_FORCEOFFFEEDBACK, + STARTUPINFOW, + }; + use windows::core::PWSTR; + + let mut command_line: Vec = OsStr::new(&format!( + "\"{}\" --crash-handler \"{}\"", + exe.display(), + socket_name.display() + )) + .encode_wide() + .chain(once(0)) + .collect(); + + let mut startup_info = STARTUPINFOW::default(); + startup_info.cb = std::mem::size_of::() as u32; + + // By default, Windows enables a "busy" cursor when a GUI application is launched. + // This cursor is disabled once the application starts processing window messages. + // Since the crash handler process doesn't process messages, this "busy" cursor stays enabled for a long time. + // Disable the cursor feedback to prevent this from happening. 
+ startup_info.dwFlags = STARTF_FORCEOFFFEEDBACK; + + let mut process_info = PROCESS_INFORMATION::default(); + + unsafe { + CreateProcessW( + None, + Some(PWSTR(command_line.as_mut_ptr())), + None, + None, + false, + PROCESS_CREATION_FLAGS(0), + None, + None, + &startup_info, + &mut process_info, + ) + .expect("unable to spawn server process"); + + windows::Win32::Foundation::CloseHandle(process_info.hProcess).ok(); + windows::Win32::Foundation::CloseHandle(process_info.hThread).ok(); + } +} + pub fn crash_server(socket: &Path) { let Ok(mut server) = minidumper::Server::with_name(socket) else { log::info!("Couldn't create socket, there may already be a running crash server"); diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index b303a0c0268c7e7812e49d1ff3fbe827f6eac2aa..96a35bc8ab66c4f3d71e4eca46488af90eb14e7c 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -324,6 +324,7 @@ pub async fn download_adapter_from_github( extract_zip(&version_path, file) .await // we cannot check the status as some adapter include files with names that trigger `Illegal byte sequence` + .inspect_err(|e| log::warn!("ZIP extraction error: {}. Ignoring...", e)) .ok(); util::fs::remove_matching(&adapter_path, |entry| { diff --git a/crates/dap_adapters/src/go.rs b/crates/dap_adapters/src/go.rs index 323ca094934fc93466451246f4bc69f34ded4891..d3253d5fe250f7228ebddec15a691ac650a19c89 100644 --- a/crates/dap_adapters/src/go.rs +++ b/crates/dap_adapters/src/go.rs @@ -366,7 +366,7 @@ impl DebugAdapter for GoDebugAdapter { dap::DebugRequest::Attach(attach_config) => { json!({ "request": "attach", - "mode": "debug", + "mode": "local", "processId": attach_config.process_id, }) } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 4d81e5ba851305ae3adc2ee0a6ab6a29f43edd62..a45e16dc32180e1e4ed3b2ec01c92ad07ffc5e93 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -368,6 +368,9 @@ impl PythonDebugAdapter { bail!("Cannot have two different ports in debug configuration") } + if let Some(hostname) = config_host { + tcp_connection.host = Some(hostname.parse().context("hostname must be IPv4")?); + } tcp_connection.port = config_port; DebugpyLaunchMode::AttachWithConnect { host: config_host } } else { @@ -867,7 +870,7 @@ impl DebugAdapter for PythonDebugAdapter { .active_toolchain( delegate.worktree_id(), base_path.into_arc(), - language::LanguageName::new(Self::LANGUAGE_NAME), + language::LanguageName::new_static(Self::LANGUAGE_NAME), cx, ) .await diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 738c60870f2200e11e710f9c94d02682b94677f7..317ce8b4c65e441f1fc4041706989532aa150204 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -998,7 +998,11 @@ impl Item for DapLogView { None } - fn as_searchable(&self, handle: &Entity) -> Option> { + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { Some(Box::new(handle.clone())) } } @@ -1013,11 +1017,13 @@ impl SearchableItem for DapLogView { fn update_matches( &mut self, matches: &[Self::Match], + active_match_index: Option, window: &mut Window, cx: &mut Context, ) { - self.editor - .update(cx, |e, cx| e.update_matches(matches, window, cx)) + self.editor.update(cx, |e, cx| { + e.update_matches(matches, active_match_index, window, cx) + }) } fn query_suggestion(&mut self, window: &mut Window, cx: &mut Context) -> String { @@ -1029,13 +1035,11 @@ impl 
SearchableItem for DapLogView { &mut self, index: usize, matches: &[Self::Match], - collapse: bool, window: &mut Window, cx: &mut Context, ) { - self.editor.update(cx, |e, cx| { - e.activate_match(index, matches, collapse, window, cx) - }) + self.editor + .update(cx, |e, cx| e.activate_match(index, matches, window, cx)) } fn select_matches( diff --git a/crates/debugger_ui/Cargo.toml b/crates/debugger_ui/Cargo.toml index c1a0657c0ed93508acb330a98dc6d1c1ee91c570..fb79b1b0790b28d7204774720bf9c413cfed64e6 100644 --- a/crates/debugger_ui/Cargo.toml +++ b/crates/debugger_ui/Cargo.toml @@ -37,6 +37,7 @@ dap_adapters = { workspace = true, optional = true } db.workspace = true debugger_tools.workspace = true editor.workspace = true +feature_flags.workspace = true file_icons.workspace = true futures.workspace = true fuzzy.workspace = true @@ -70,6 +71,7 @@ theme.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true ui.workspace = true +ui_input.workspace = true unindent = { workspace = true, optional = true } util.workspace = true workspace.workspace = true @@ -81,6 +83,7 @@ dap_adapters = { workspace = true, features = ["test-support"] } debugger_tools = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } tree-sitter-go.workspace = true unindent.workspace = true diff --git a/crates/debugger_ui/src/attach_modal.rs b/crates/debugger_ui/src/attach_modal.rs index e39a842f63590375898c9870c345574e1932a788..6e537ae0c6e1db7418596cf48b51ca22df30be57 100644 --- a/crates/debugger_ui/src/attach_modal.rs +++ b/crates/debugger_ui/src/attach_modal.rs @@ -1,4 +1,5 @@ use dap::{DapRegistry, DebugRequest}; +use futures::channel::oneshot; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render, Task}; use gpui::{Subscription, WeakEntity}; @@ -9,6 +10,7 @@ use task::ZedDebugConfig; use util::debug_panic; use std::sync::Arc; + use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind}; use ui::{Context, Tooltip, prelude::*}; use ui::{ListItem, ListItemSpacing}; @@ -23,11 +25,16 @@ pub(super) struct Candidate { pub(super) command: Vec, } +pub(crate) enum ModalIntent { + ResolveProcessId(Option>>), + AttachToProcess(ZedDebugConfig), +} + pub(crate) struct AttachModalDelegate { selected_index: usize, matches: Vec, placeholder_text: Arc, - pub(crate) definition: ZedDebugConfig, + pub(crate) intent: ModalIntent, workspace: WeakEntity, candidates: Arc<[Candidate]>, } @@ -35,13 +42,13 @@ pub(crate) struct AttachModalDelegate { impl AttachModalDelegate { fn new( workspace: WeakEntity, - definition: ZedDebugConfig, + intent: ModalIntent, candidates: Arc<[Candidate]>, ) -> Self { Self { workspace, - definition, candidates, + intent, selected_index: 0, matches: Vec::default(), placeholder_text: Arc::from("Select the process you want to attach the debugger to"), @@ -55,8 +62,8 @@ pub struct AttachModal { } impl AttachModal { - pub fn new( - definition: ZedDebugConfig, + pub(crate) fn new( + intent: ModalIntent, workspace: WeakEntity, project: Entity, modal: bool, @@ -65,7 +72,7 @@ impl AttachModal { ) -> Self { let processes_task = get_processes_for_project(&project, cx); - let modal = Self::with_processes(workspace, definition, Arc::new([]), modal, window, cx); + let modal = 
Self::with_processes(workspace, Arc::new([]), modal, intent, window, cx); cx.spawn_in(window, async move |this, cx| { let processes = processes_task.await; @@ -84,15 +91,15 @@ impl AttachModal { pub(super) fn with_processes( workspace: WeakEntity, - definition: ZedDebugConfig, processes: Arc<[Candidate]>, modal: bool, + intent: ModalIntent, window: &mut Window, cx: &mut Context, ) -> Self { let picker = cx.new(|cx| { Picker::uniform_list( - AttachModalDelegate::new(workspace, definition, processes), + AttachModalDelegate::new(workspace, intent, processes), window, cx, ) @@ -207,7 +214,7 @@ impl PickerDelegate for AttachModalDelegate { }) } - fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context>) { + fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { let candidate = self .matches .get(self.selected_index()) @@ -216,69 +223,86 @@ impl PickerDelegate for AttachModalDelegate { self.candidates.get(ix) }); - let Some(candidate) = candidate else { - return cx.emit(DismissEvent); - }; + match &mut self.intent { + ModalIntent::ResolveProcessId(sender) => { + cx.emit(DismissEvent); - match &mut self.definition.request { - DebugRequest::Attach(config) => { - config.process_id = Some(candidate.pid); - } - DebugRequest::Launch(_) => { - debug_panic!("Debugger attach modal used on launch debug config"); - return; + if let Some(sender) = sender.take() { + sender + .send(candidate.map(|candidate| candidate.pid as i32)) + .ok(); + } } - } - - let workspace = self.workspace.clone(); - let Some(panel) = workspace - .update(cx, |workspace, cx| workspace.panel::(cx)) - .ok() - .flatten() - else { - return; - }; - - if secondary { - // let Some(id) = worktree_id else { return }; - // cx.spawn_in(window, async move |_, cx| { - // panel - // .update_in(cx, |debug_panel, window, cx| { - // debug_panel.save_scenario(&debug_scenario, id, window, cx) - // })? 
- // .await?; - // anyhow::Ok(()) - // }) - // .detach_and_log_err(cx); - } - let Some(adapter) = cx.read_global::(|registry, _| { - registry.adapter(&self.definition.adapter) - }) else { - return; - }; - - let definition = self.definition.clone(); - cx.spawn_in(window, async move |this, cx| { - let Ok(scenario) = adapter.config_from_zed_format(definition).await else { - return; - }; + ModalIntent::AttachToProcess(definition) => { + let Some(candidate) = candidate else { + return cx.emit(DismissEvent); + }; + + match &mut definition.request { + DebugRequest::Attach(config) => { + config.process_id = Some(candidate.pid); + } + DebugRequest::Launch(_) => { + debug_panic!("Debugger attach modal used on launch debug config"); + return; + } + } - panel - .update_in(cx, |panel, window, cx| { - panel.start_session(scenario, Default::default(), None, None, window, cx); + let workspace = self.workspace.clone(); + let Some(panel) = workspace + .update(cx, |workspace, cx| workspace.panel::(cx)) + .ok() + .flatten() + else { + return; + }; + + let Some(adapter) = cx.read_global::(|registry, _| { + registry.adapter(&definition.adapter) + }) else { + return; + }; + + let definition = definition.clone(); + cx.spawn_in(window, async move |this, cx| { + let Ok(scenario) = adapter.config_from_zed_format(definition).await else { + return; + }; + + panel + .update_in(cx, |panel, window, cx| { + panel.start_session( + scenario, + Default::default(), + None, + None, + window, + cx, + ); + }) + .ok(); + this.update(cx, |_, cx| { + cx.emit(DismissEvent); + }) + .ok(); }) - .ok(); - this.update(cx, |_, cx| { - cx.emit(DismissEvent); - }) - .ok(); - }) - .detach(); + .detach(); + } + } } fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { self.selected_index = 0; + match &mut self.intent { + ModalIntent::ResolveProcessId(sender) => { + if let Some(sender) = sender.take() { + sender.send(None).ok(); + } + } + ModalIntent::AttachToProcess(_) => {} + } + cx.emit(DismissEvent); } @@ -293,7 +317,7 @@ impl PickerDelegate for AttachModalDelegate { let candidate = self.candidates.get(hit.candidate_id)?; Some( - ListItem::new(SharedString::from(format!("process-entry-{ix}"))) + ListItem::new(format!("process-entry-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) @@ -303,7 +327,7 @@ impl PickerDelegate for AttachModalDelegate { .child(Label::new(format!("{} {}", candidate.name, candidate.pid))) .child( div() - .id(SharedString::from(format!("process-entry-{ix}-command"))) + .id(format!("process-entry-{ix}-command")) .tooltip(Tooltip::text( candidate .command @@ -338,7 +362,7 @@ fn get_processes_for_project(project: &Entity, cx: &mut App) -> Task, cx: &mut App) -> Task) -> Vec { +#[cfg(test)] +pub(crate) fn set_candidates( + modal: &AttachModal, + candidates: Arc<[Candidate]>, + window: &mut Window, + cx: &mut Context, +) { + modal.picker.update(cx, |picker, cx| { + picker.delegate.candidates = candidates; + picker.refresh(window, cx); + }); +} + +#[cfg(test)] +pub(crate) fn process_names(modal: &AttachModal, cx: &mut Context) -> Vec { modal.picker.read_with(cx, |picker, _| { picker .delegate diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 5379591f8ed256d2703a8e61b09925e9743ed341..35ce80d3f64e362735c1c020363dbbfc2703a101 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -14,13 +14,13 @@ use collections::IndexMap; use dap::adapters::DebugAdapterName; use dap::{DapRegistry, 
StartDebuggingRequestArguments}; use dap::{client::SessionId, debugger_settings::DebuggerSettings}; -use editor::Editor; +use editor::{Editor, MultiBufferOffset, ToPoint}; +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; use gpui::{ - Action, App, AsyncWindowContext, ClipboardItem, Context, DismissEvent, Entity, EntityId, - EventEmitter, FocusHandle, Focusable, MouseButton, MouseDownEvent, Point, Subscription, Task, - WeakEntity, anchored, deferred, + Action, App, AsyncWindowContext, ClipboardItem, Context, Corner, DismissEvent, Entity, + EntityId, EventEmitter, FocusHandle, Focusable, MouseButton, MouseDownEvent, Point, + Subscription, Task, WeakEntity, anchored, deferred, }; -use text::ToPoint as _; use itertools::Itertools as _; use language::Buffer; @@ -32,7 +32,9 @@ use settings::Settings; use std::sync::{Arc, LazyLock}; use task::{DebugScenario, TaskContext}; use tree_sitter::{Query, StreamingIterator as _}; -use ui::{ContextMenu, Divider, PopoverMenuHandle, Tab, Tooltip, prelude::*}; +use ui::{ + ContextMenu, Divider, PopoverMenu, PopoverMenuHandle, SplitButton, Tab, Tooltip, prelude::*, +}; use util::rel_path::RelPath; use util::{ResultExt, debug_panic, maybe}; use workspace::SplitDirection; @@ -43,6 +45,12 @@ use workspace::{ }; use zed_actions::ToggleFocus; +pub struct DebuggerHistoryFeatureFlag; + +impl FeatureFlag for DebuggerHistoryFeatureFlag { + const NAME: &'static str = "debugger-history"; +} + const DEBUG_PANEL_KEY: &str = "DebugPanel"; pub struct DebugPanel { @@ -285,7 +293,7 @@ impl DebugPanel { } }); - session.update(cx, |session, _| match &mut session.mode { + session.update(cx, |session, _| match &mut session.state { SessionState::Booting(state_task) => { *state_task = Some(boot_task); } @@ -569,7 +577,7 @@ impl DebugPanel { menu }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let subscription = cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| { this.context_menu.take(); cx.notify(); @@ -652,6 +660,23 @@ impl DebugPanel { .tooltip(Tooltip::text("Open Debug Adapter Logs")) }; + let close_bottom_panel_button = { + h_flex().pl_0p5().gap_1().child(Divider::vertical()).child( + IconButton::new("debug-close-panel", IconName::Close) + .icon_size(IconSize::Small) + .on_click(move |_, window, cx| { + window.dispatch_action(workspace::ToggleBottomDock.boxed_clone(), cx) + }) + .tooltip(Tooltip::text("Close Panel")), + ) + }; + + let thread_status = active_session + .as_ref() + .map(|session| session.read(cx).running_state()) + .and_then(|state| state.read(cx).thread_status(cx)) + .unwrap_or(project::debugger::session::ThreadStatus::Exited); + Some( div.w_full() .py_1() @@ -659,7 +684,7 @@ impl DebugPanel { .justify_between() .border_b_1() .border_color(cx.theme().colors().border) - .when(is_side, |this| this.gap_1()) + .when(is_side, |this| this.gap_1().h(Tab::container_height(cx))) .child( h_flex() .justify_between() @@ -669,10 +694,6 @@ impl DebugPanel { .as_ref() .map(|session| session.read(cx).running_state()), |this, running_state| { - let thread_status = - running_state.read(cx).thread_status(cx).unwrap_or( - project::debugger::session::ThreadStatus::Exited, - ); let capabilities = running_state.read(cx).capabilities(cx); let supports_detach = running_state.read(cx).session().read(cx).is_attached(); @@ -730,7 +751,7 @@ impl DebugPanel { } }) .child( - IconButton::new("debug-step-over", IconName::ArrowRight) + IconButton::new("step-over", IconName::DebugStepOver) .icon_size(IconSize::Small) 
.on_click(window.listener_for( running_state, @@ -752,32 +773,29 @@ impl DebugPanel { }), ) .child( - IconButton::new( - "debug-step-into", - IconName::ArrowDownRight, - ) - .icon_size(IconSize::Small) - .on_click(window.listener_for( - running_state, - |this, _, _window, cx| { - this.step_in(cx); - }, - )) - .disabled(thread_status != ThreadStatus::Stopped) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "Step In", - &StepInto, - &focus_handle, - cx, - ) - } - }), + IconButton::new("step-into", IconName::DebugStepInto) + .icon_size(IconSize::Small) + .on_click(window.listener_for( + running_state, + |this, _, _window, cx| { + this.step_in(cx); + }, + )) + .disabled(thread_status != ThreadStatus::Stopped) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |_window, cx| { + Tooltip::for_action_in( + "Step In", + &StepInto, + &focus_handle, + cx, + ) + } + }), ) .child( - IconButton::new("debug-step-out", IconName::ArrowUpRight) + IconButton::new("step-out", IconName::DebugStepOut) .icon_size(IconSize::Small) .on_click(window.listener_for( running_state, @@ -864,36 +882,53 @@ impl DebugPanel { } }), ) + .when(supports_detach, |div| { + div.child( + IconButton::new( + "debug-disconnect", + IconName::DebugDetach, + ) + .disabled( + thread_status != ThreadStatus::Stopped + && thread_status != ThreadStatus::Running, + ) + .icon_size(IconSize::Small) + .on_click(window.listener_for( + running_state, + |this, _, _, cx| { + this.detach_client(cx); + }, + )) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |_window, cx| { + Tooltip::for_action_in( + "Detach", + &Detach, + &focus_handle, + cx, + ) + } + }), + ) + }) .when( - supports_detach, - |div| { - div.child( - IconButton::new( - "debug-disconnect", - IconName::DebugDetach, - ) - .disabled( - thread_status != ThreadStatus::Stopped - && thread_status != ThreadStatus::Running, + cx.has_flag::(), + |this| { + this.child(Divider::vertical()).child( + SplitButton::new( + self.render_history_button( + &running_state, + thread_status, + window, + ), + self.render_history_toggle_button( + thread_status, + &running_state, + ) + .into_any_element(), ) - .icon_size(IconSize::Small) - .on_click(window.listener_for( - running_state, - |this, _, _, cx| { - this.detach_client(cx); - }, - )) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "Detach", - &Detach, - &focus_handle, - cx, - ) - } - }), + .style(ui::SplitButtonStyle::Outlined), ) }, ) @@ -958,6 +993,7 @@ impl DebugPanel { .child(edit_debug_json_button()) .child(documentation_button()) .child(logs_button()) + .child(close_bottom_panel_button) }), ), ), @@ -1016,7 +1052,7 @@ impl DebugPanel { cx: &mut Context, ) { debug_assert!(self.sessions_with_children.contains_key(&session_item)); - session_item.focus_handle(cx).focus(window); + session_item.focus_handle(cx).focus(window, cx); session_item.update(cx, |this, cx| { this.running_state().update(cx, |this, cx| { this.go_to_selected_stack_frame(window, cx); @@ -1216,11 +1252,11 @@ impl DebugPanel { let mut last_offset = None; while let Some(mat) = matches.next() { if let Some(pos) = mat.captures.first().map(|m| m.node.byte_range().end) { - last_offset = Some(pos) + last_offset = Some(MultiBufferOffset(pos)) } } let mut edits = Vec::new(); - let mut cursor_position = 0; + let mut cursor_position = MultiBufferOffset(0); if let Some(pos) = last_offset { edits.push((pos..pos, format!(",\n{new_scenario}"))); @@ -1234,24 
+1270,25 @@ impl DebugPanel { if let Some(mat) = matches.next() { if let Some(pos) = mat.captures.first().map(|m| m.node.byte_range().end - 1) { - edits.push((pos..pos, format!("\n{new_scenario}\n"))); - cursor_position = pos + "\n ".len(); + edits.push(( + MultiBufferOffset(pos)..MultiBufferOffset(pos), + format!("\n{new_scenario}\n"), + )); + cursor_position = MultiBufferOffset(pos) + "\n ".len(); } } else { - edits.push((0..0, format!("[\n{}\n]", new_scenario))); - cursor_position = "[\n ".len(); + edits.push(( + MultiBufferOffset(0)..MultiBufferOffset(0), + format!("[\n{}\n]", new_scenario), + )); + cursor_position = MultiBufferOffset("[\n ".len()); } } editor.transact(window, cx, |editor, window, cx| { editor.edit(edits, cx); - let snapshot = editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .read(cx) - .snapshot(); + let snapshot = editor.buffer().read(cx).read(cx); let point = cursor_position.to_point(&snapshot); + drop(snapshot); editor.go_to_singleton_buffer_point(point, window, cx); }); Ok(editor.save(SaveOptions::default(), project, window, cx)) @@ -1308,6 +1345,97 @@ impl DebugPanel { }); } } + + fn render_history_button( + &self, + running_state: &Entity, + thread_status: ThreadStatus, + window: &mut Window, + ) -> IconButton { + IconButton::new("debug-back-in-history", IconName::HistoryRerun) + .icon_size(IconSize::Small) + .on_click(window.listener_for(running_state, |this, _, _window, cx| { + this.session().update(cx, |session, cx| { + let ix = session + .active_snapshot_index() + .unwrap_or_else(|| session.historic_snapshots().len()); + + session.select_historic_snapshot(Some(ix.saturating_sub(1)), cx); + }) + })) + .disabled( + thread_status == ThreadStatus::Running || thread_status == ThreadStatus::Stepping, + ) + } + + fn render_history_toggle_button( + &self, + thread_status: ThreadStatus, + running_state: &Entity, + ) -> impl IntoElement { + PopoverMenu::new("debug-back-in-history-menu") + .trigger( + ui::ButtonLike::new_rounded_right("debug-back-in-history-menu-trigger") + .layer(ui::ElevationIndex::ModalSurface) + .size(ui::ButtonSize::None) + .child( + div() + .px_1() + .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)), + ) + .disabled( + thread_status == ThreadStatus::Running + || thread_status == ThreadStatus::Stepping, + ), + ) + .menu({ + let running_state = running_state.clone(); + move |window, cx| { + let handler = + |ix: Option, running_state: Entity, cx: &mut App| { + running_state.update(cx, |state, cx| { + state.session().update(cx, |session, cx| { + session.select_historic_snapshot(ix, cx); + }) + }) + }; + + let running_state = running_state.clone(); + Some(ContextMenu::build( + window, + cx, + move |mut context_menu, _window, cx| { + let history = running_state + .read(cx) + .session() + .read(cx) + .historic_snapshots(); + + context_menu = context_menu.entry("Current State", None, { + let running_state = running_state.clone(); + move |_window, cx| { + handler(None, running_state.clone(), cx); + } + }); + context_menu = context_menu.separator(); + + for (ix, _) in history.iter().enumerate().rev() { + context_menu = + context_menu.entry(format!("history-{}", ix + 1), None, { + let running_state = running_state.clone(); + move |_window, cx| { + handler(Some(ix), running_state.clone(), cx); + } + }); + } + + context_menu + }, + )) + } + }) + .anchor(Corner::TopRight) + } } async fn register_session_inner( @@ -1429,7 +1557,7 @@ impl Panel for DebugPanel { self.sessions_with_children.keys().for_each(|session_item| { 
session_item.update(cx, |item, cx| { item.running_state() - .update(cx, |state, _| state.invert_axies()) + .update(cx, |state, cx| state.invert_axies(cx)) }) }); } @@ -1692,7 +1820,7 @@ impl Render for DebugPanel { .child( Button::new("spawn-new-session-empty-state", "New Session") .icon(IconName::Plus) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::Start) .on_click(|_, window, cx| { @@ -1702,8 +1830,7 @@ impl Render for DebugPanel { .child( Button::new("edit-debug-settings", "Edit debug.json") .icon(IconName::Code) - .icon_size(IconSize::XSmall) - .color(Color::Muted) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::Start) .on_click(|_, window, cx| { @@ -1716,8 +1843,7 @@ impl Render for DebugPanel { .child( Button::new("open-debugger-docs", "Debugger Docs") .icon(IconName::Book) - .color(Color::Muted) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::Start) .on_click(|_, _, cx| cx.open_url("https://zed.dev/docs/debugger")), @@ -1728,8 +1854,7 @@ impl Render for DebugPanel { "Debugger Extensions", ) .icon(IconName::Blocks) - .color(Color::Muted) - .icon_size(IconSize::XSmall) + .icon_size(IconSize::Small) .icon_color(Color::Muted) .icon_position(IconPosition::Start) .on_click(|_, window, cx| { @@ -1746,6 +1871,15 @@ impl Render for DebugPanel { }), ); + let has_breakpoints = self + .project + .read(cx) + .breakpoint_store() + .read(cx) + .all_source_breakpoints(cx) + .values() + .any(|breakpoints| !breakpoints.is_empty()); + let breakpoint_list = v_flex() .group("base-breakpoint-list") .when_else( @@ -1769,7 +1903,18 @@ impl Render for DebugPanel { ), ), ) - .child(self.breakpoint_list.clone()); + .when(has_breakpoints, |this| { + this.child(self.breakpoint_list.clone()) + }) + .when(!has_breakpoints, |this| { + this.child( + v_flex().size_full().items_center().justify_center().child( + Label::new("No Breakpoints Set") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + }); this.child( v_flex() diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 0e38719c19b636918b441440568e8588e29b039e..bd5a7cda4a21a3d3fd0ac132d6ba2e7aace68722 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -1,7 +1,7 @@ use std::any::TypeId; use debugger_panel::DebugPanel; -use editor::Editor; +use editor::{Editor, MultiBufferOffsetUtf16}; use gpui::{Action, App, DispatchPhase, EntityInputHandler, actions}; use new_process_modal::{NewProcessModal, NewProcessMode}; use onboarding_modal::DebuggerOnboardingModal; @@ -387,14 +387,17 @@ pub fn init(cx: &mut App) { window.on_action( TypeId::of::(), move |_, _, window, cx| { - maybe!({ + let status = maybe!({ let text = editor .update(cx, |editor, cx| { + let range = editor + .selections + .newest::( + &editor.display_snapshot(cx), + ) + .range(); editor.text_for_range( - editor - .selections - .newest(&editor.display_snapshot(cx)) - .range(), + range.start.0.0..range.end.0.0, &mut None, window, cx, @@ -408,7 +411,13 @@ pub fn init(cx: &mut App) { state.session().update(cx, |session, cx| { session - .evaluate(text, None, stack_id, None, cx) + .evaluate( + text, + Some(dap::EvaluateArgumentsContext::Repl), + stack_id, + None, + cx, + ) .detach(); }); }); @@ -416,6 +425,9 @@ pub fn init(cx: &mut App) { Some(()) }); + if status.is_some() { + cx.stop_propagation(); + } }, ); }) diff --git 
a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index 9960ae8a3642f727069661871e70b7f02fcb3f95..68e391562b57d530a21624b0626173eeb7a67c16 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -12,30 +12,29 @@ use tasks_ui::{TaskOverrides, TasksModal}; use dap::{ DapRegistry, DebugRequest, TelemetrySpawnLocation, adapters::DebugAdapterName, send_telemetry, }; -use editor::{Editor, EditorElement, EditorStyle}; +use editor::Editor; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ Action, App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - KeyContext, Render, Subscription, Task, TextStyle, WeakEntity, + KeyContext, Render, Subscription, Task, WeakEntity, }; use itertools::Itertools as _; use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch}; use project::{DebugScenarioContext, Project, TaskContexts, TaskSourceKind, task_store::TaskStore}; -use settings::Settings; use task::{DebugScenario, RevealTarget, VariableName, ZedDebugConfig}; -use theme::ThemeSettings; use ui::{ - ActiveTheme, Button, ButtonCommon, ButtonSize, CheckboxWithLabel, Clickable, Color, Context, - ContextMenu, Disableable, DropdownMenu, FluentBuilder, Icon, IconName, IconSize, - IconWithIndicator, Indicator, InteractiveElement, IntoElement, KeyBinding, Label, - LabelCommon as _, LabelSize, ListItem, ListItemSpacing, ParentElement, RenderOnce, - SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Tooltip, Window, div, - h_flex, relative, rems, v_flex, + ContextMenu, DropdownMenu, FluentBuilder, IconWithIndicator, Indicator, KeyBinding, ListItem, + ListItemSpacing, Switch, SwitchLabelPosition, ToggleButtonGroup, ToggleButtonSimple, + ToggleState, Tooltip, prelude::*, }; -use util::{ResultExt, rel_path::RelPath, shell::ShellKind}; +use ui_input::InputField; +use util::{ResultExt, debug_panic, rel_path::RelPath, shell::ShellKind}; use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr, pane}; -use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel}; +use crate::{ + attach_modal::{AttachModal, ModalIntent}, + debugger_panel::DebugPanel, +}; pub(super) struct NewProcessModal { workspace: WeakEntity, @@ -398,8 +397,15 @@ impl NewProcessModal { this.attach_picker.update(cx, |this, cx| { this.picker.update(cx, |this, cx| { - this.delegate.definition.adapter = adapter.0.clone(); - this.focus(window, cx); + match &mut this.delegate.intent { + ModalIntent::AttachToProcess(definition) => { + definition.adapter = adapter.0.clone(); + this.focus(window, cx); + }, + ModalIntent::ResolveProcessId(_) => { + debug_panic!("Attach picker attempted to update config when in resolve Process ID mode"); + } + } }) }); } @@ -441,7 +447,7 @@ impl NewProcessModal { &mut self, window: &mut Window, cx: &mut Context, - ) -> ui::DropdownMenu { + ) -> DropdownMenu { let workspace = self.workspace.clone(); let weak = cx.weak_entity(); let active_buffer = self.task_contexts(cx).and_then(|tc| { @@ -501,6 +507,13 @@ impl NewProcessModal { menu }), ) + .style(ui::DropdownStyle::Outlined) + .tab_index(0) + .attach(gpui::Corner::BottomLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) } } @@ -533,44 +546,6 @@ impl Focusable for NewProcessMode { } } -fn render_editor(editor: &Entity, window: &mut Window, cx: &App) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - let theme = cx.theme(); - - let text_style = TextStyle { - 
color: cx.theme().colors().text, - font_family: settings.buffer_font.family.clone(), - font_features: settings.buffer_font.features.clone(), - font_size: settings.buffer_font_size(cx).into(), - font_weight: settings.buffer_font.weight, - line_height: relative(settings.buffer_line_height.value()), - background_color: Some(theme.colors().editor_background), - ..Default::default() - }; - - let element = EditorElement::new( - editor, - EditorStyle { - background: theme.colors().editor_background, - local_player: theme.players().local(), - text: text_style, - ..Default::default() - }, - ); - - div() - .rounded_md() - .p_1() - .border_1() - .border_color(theme.colors().border_variant) - .when( - editor.focus_handle(cx).contains_focused(window, cx), - |this| this.border_color(theme.colors().border_focused), - ) - .child(element) - .bg(theme.colors().editor_background) -} - impl Render for NewProcessModal { fn render( &mut self, @@ -599,7 +574,7 @@ impl Render for NewProcessModal { NewProcessMode::Launch => NewProcessMode::Task, }; - this.mode_focus_handle(cx).focus(window); + this.mode_focus_handle(cx).focus(window, cx); })) .on_action( cx.listener(|this, _: &pane::ActivatePreviousItem, window, cx| { @@ -610,7 +585,7 @@ impl Render for NewProcessModal { NewProcessMode::Launch => NewProcessMode::Attach, }; - this.mode_focus_handle(cx).focus(window); + this.mode_focus_handle(cx).focus(window, cx); }), ) .child( @@ -620,72 +595,64 @@ impl Render for NewProcessModal { .border_b_1() .border_color(cx.theme().colors().border_variant) .child( - ToggleButton::new( - "debugger-session-ui-tasks-button", - NewProcessMode::Task.to_string(), - ) - .size(ButtonSize::Default) - .toggle_state(matches!(self.mode, NewProcessMode::Task)) - .style(ui::ButtonStyle::Subtle) - .on_click(cx.listener(|this, _, window, cx| { - this.mode = NewProcessMode::Task; - this.mode_focus_handle(cx).focus(window); - cx.notify(); - })) - .tooltip(Tooltip::text("Run predefined task")) - .first(), - ) - .child( - ToggleButton::new( - "debugger-session-ui-launch-button", - NewProcessMode::Debug.to_string(), - ) - .size(ButtonSize::Default) - .style(ui::ButtonStyle::Subtle) - .toggle_state(matches!(self.mode, NewProcessMode::Debug)) - .on_click(cx.listener(|this, _, window, cx| { - this.mode = NewProcessMode::Debug; - this.mode_focus_handle(cx).focus(window); - cx.notify(); - })) - .tooltip(Tooltip::text("Start a predefined debug scenario")) - .middle(), - ) - .child( - ToggleButton::new( - "debugger-session-ui-attach-button", - NewProcessMode::Attach.to_string(), - ) - .size(ButtonSize::Default) - .toggle_state(matches!(self.mode, NewProcessMode::Attach)) - .style(ui::ButtonStyle::Subtle) - .on_click(cx.listener(|this, _, window, cx| { - this.mode = NewProcessMode::Attach; - - if let Some(debugger) = this.debugger.as_ref() { - Self::update_attach_picker(&this.attach_mode, debugger, window, cx); - } - this.mode_focus_handle(cx).focus(window); - cx.notify(); - })) - .tooltip(Tooltip::text("Attach the debugger to a running process")) - .middle(), - ) - .child( - ToggleButton::new( - "debugger-session-ui-custom-button", - NewProcessMode::Launch.to_string(), + ToggleButtonGroup::single_row( + "debugger-mode-buttons", + [ + ToggleButtonSimple::new( + NewProcessMode::Task.to_string(), + cx.listener(|this, _, window, cx| { + this.mode = NewProcessMode::Task; + this.mode_focus_handle(cx).focus(window, cx); + cx.notify(); + }), + ) + .tooltip(Tooltip::text("Run predefined task")), + ToggleButtonSimple::new( + NewProcessMode::Debug.to_string(), + 
cx.listener(|this, _, window, cx| { + this.mode = NewProcessMode::Debug; + this.mode_focus_handle(cx).focus(window, cx); + cx.notify(); + }), + ) + .tooltip(Tooltip::text("Start a predefined debug scenario")), + ToggleButtonSimple::new( + NewProcessMode::Attach.to_string(), + cx.listener(|this, _, window, cx| { + this.mode = NewProcessMode::Attach; + + if let Some(debugger) = this.debugger.as_ref() { + Self::update_attach_picker( + &this.attach_mode, + debugger, + window, + cx, + ); + } + this.mode_focus_handle(cx).focus(window, cx); + cx.notify(); + }), + ) + .tooltip(Tooltip::text("Attach the debugger to a running process")), + ToggleButtonSimple::new( + NewProcessMode::Launch.to_string(), + cx.listener(|this, _, window, cx| { + this.mode = NewProcessMode::Launch; + this.mode_focus_handle(cx).focus(window, cx); + cx.notify(); + }), + ) + .tooltip(Tooltip::text("Launch a new process with a debugger")), + ], ) - .size(ButtonSize::Default) - .toggle_state(matches!(self.mode, NewProcessMode::Launch)) - .style(ui::ButtonStyle::Subtle) - .on_click(cx.listener(|this, _, window, cx| { - this.mode = NewProcessMode::Launch; - this.mode_focus_handle(cx).focus(window); - cx.notify(); - })) - .tooltip(Tooltip::text("Launch a new process with a debugger")) - .last(), + .label_size(LabelSize::Default) + .auto_width() + .selected_index(match self.mode { + NewProcessMode::Task => 0, + NewProcessMode::Debug => 1, + NewProcessMode::Attach => 2, + NewProcessMode::Launch => 3, + }), ), ) .child(v_flex().child(self.render_mode(window, cx))) @@ -789,22 +756,26 @@ impl RenderOnce for AttachMode { #[derive(Clone)] pub(super) struct ConfigureMode { - program: Entity, - cwd: Entity, + program: Entity, + cwd: Entity, stop_on_entry: ToggleState, save_to_debug_json: ToggleState, } impl ConfigureMode { pub(super) fn new(window: &mut Window, cx: &mut App) -> Entity { - let program = cx.new(|cx| Editor::single_line(window, cx)); - program.update(cx, |this, cx| { - this.set_placeholder_text("ENV=Zed ~/bin/program --option", window, cx); + let program = cx.new(|cx| { + InputField::new(window, cx, "ENV=Zed ~/bin/program --option") + .label("Program") + .tab_stop(true) + .tab_index(1) }); - let cwd = cx.new(|cx| Editor::single_line(window, cx)); - cwd.update(cx, |this, cx| { - this.set_placeholder_text("Ex: $ZED_WORKTREE_ROOT", window, cx); + let cwd = cx.new(|cx| { + InputField::new(window, cx, "Ex: $ZED_WORKTREE_ROOT") + .label("Working Directory") + .tab_stop(true) + .tab_index(2) }); cx.new(|_| Self { @@ -816,9 +787,9 @@ impl ConfigureMode { } fn load(&mut self, cwd: PathBuf, window: &mut Window, cx: &mut App) { - self.cwd.update(cx, |editor, cx| { - if editor.is_empty(cx) { - editor.set_text(cwd.to_string_lossy(), window, cx); + self.cwd.update(cx, |input_field, cx| { + if input_field.is_empty(cx) { + input_field.set_text(cwd.to_string_lossy(), window, cx); } }); } @@ -869,55 +840,48 @@ impl ConfigureMode { } } + fn on_tab(&mut self, _: &menu::SelectNext, window: &mut Window, cx: &mut Context) { + window.focus_next(cx); + } + + fn on_tab_prev( + &mut self, + _: &menu::SelectPrevious, + window: &mut Window, + cx: &mut Context, + ) { + window.focus_prev(cx); + } + fn render( &mut self, adapter_menu: DropdownMenu, - window: &mut Window, + _: &mut Window, cx: &mut ui::Context, ) -> impl IntoElement { v_flex() + .tab_group() + .track_focus(&self.program.focus_handle(cx)) + .on_action(cx.listener(Self::on_tab)) + .on_action(cx.listener(Self::on_tab_prev)) .p_2() .w_full() - .gap_2() - 
.track_focus(&self.program.focus_handle(cx)) + .gap_3() .child( h_flex() - .gap_2() - .child( - Label::new("Debugger") - .size(LabelSize::Small) - .color(Color::Muted), - ) + .gap_1() + .child(Label::new("Debugger:").color(Color::Muted)) .child(adapter_menu), ) + .child(self.program.clone()) + .child(self.cwd.clone()) .child( - v_flex() - .gap_0p5() - .child( - Label::new("Program") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child(render_editor(&self.program, window, cx)), - ) - .child( - v_flex() - .gap_0p5() - .child( - Label::new("Working Directory") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child(render_editor(&self.cwd, window, cx)), - ) - .child( - CheckboxWithLabel::new( - "debugger-stop-on-entry", - Label::new("Stop on Entry") - .size(LabelSize::Small) - .color(Color::Muted), - self.stop_on_entry, - { + Switch::new("debugger-stop-on-entry", self.stop_on_entry) + .tab_index(3_isize) + .label("Stop on Entry") + .label_position(SwitchLabelPosition::Start) + .label_size(LabelSize::Default) + .on_click({ let this = cx.weak_entity(); move |state, _, cx| { this.update(cx, |this, _| { @@ -925,9 +889,7 @@ impl ConfigureMode { }) .ok(); } - }, - ) - .checkbox_position(ui::IconPosition::End), + }), ) } } @@ -953,8 +915,15 @@ impl AttachMode { stop_on_entry: Some(false), }; let attach_picker = cx.new(|cx| { - let modal = AttachModal::new(definition.clone(), workspace, project, false, window, cx); - window.focus(&modal.focus_handle(cx)); + let modal = AttachModal::new( + ModalIntent::AttachToProcess(definition.clone()), + workspace, + project, + false, + window, + cx, + ); + window.focus(&modal.focus_handle(cx), cx); modal }); @@ -1053,7 +1022,7 @@ impl DebugDelegate { Some(TaskSourceKind::Lsp { language_name, .. }) => { Some(format!("LSP: {language_name}")) } - Some(TaskSourceKind::Language { name }) => Some(format!("Lang: {name}")), + Some(TaskSourceKind::Language { name }) => Some(format!("Language: {name}")), _ => context.clone().and_then(|ctx| { ctx.task_context .task_variables @@ -1550,7 +1519,7 @@ impl PickerDelegate for DebugDelegate { }); Some( - ListItem::new(SharedString::from(format!("debug-scenario-selection-{ix}"))) + ListItem::new(format!("debug-scenario-selection-{ix}")) .inset(true) .start_slot::(icon) .spacing(ListItemSpacing::Sparse) diff --git a/crates/debugger_ui/src/onboarding_modal.rs b/crates/debugger_ui/src/onboarding_modal.rs index 18205209983421691046e8a9d93eb6de32cd4563..b6f1ab944183c4f44d2bc5f6855731abb65ce1f7 100644 --- a/crates/debugger_ui/src/onboarding_modal.rs +++ b/crates/debugger_ui/src/onboarding_modal.rs @@ -83,8 +83,8 @@ impl Render for DebuggerOnboardingModal { debugger_onboarding_event!("Canceled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child( div() diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 0e21ef1268412418c381fc14617a917f9529834d..422207d3cbf4880e0c8e3c02e01dbe373800ea62 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -5,16 +5,23 @@ pub(crate) mod memory_view; pub(crate) mod module_list; pub mod stack_frame_list; pub mod variable_list; -use std::{any::Any, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration}; +use std::{ + any::Any, + ops::ControlFlow, + 
path::PathBuf, + sync::{Arc, LazyLock}, + time::Duration, +}; use crate::{ ToggleExpandItem, + attach_modal::{AttachModal, ModalIntent}, new_process_modal::resolve_path, persistence::{self, DebuggerPaneItem, SerializedLayout}, session::running::memory_view::MemoryView, }; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result, anyhow, bail}; use breakpoint_list::BreakpointList; use collections::{HashMap, IndexMap}; use console::Console; @@ -56,6 +63,9 @@ use workspace::{ Workspace, item::TabContentParams, move_item, pane::Event, }; +static PROCESS_ID_PLACEHOLDER: LazyLock = + LazyLock::new(|| task::VariableName::PickProcessId.template_value()); + pub struct RunningState { session: Entity, thread_id: Option, @@ -276,10 +286,10 @@ impl Item for SubView { impl Render for SubView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { v_flex() - .id(SharedString::from(format!( + .id(format!( "subview-container-{}", self.kind.to_shared_string() - ))) + )) .on_hover(cx.listener(|this, hovered, _, cx| { this.hovered = *hovered; cx.notify(); @@ -338,7 +348,7 @@ pub(crate) fn new_debugger_pane( debug_assert!(_previous_subscription.is_none()); running .panes - .split(&this_pane, &new_pane, split_direction)?; + .split(&this_pane, &new_pane, split_direction, cx)?; anyhow::Ok(new_pane) }) }) @@ -474,10 +484,7 @@ pub(crate) fn new_debugger_pane( let deemphasized = !pane.has_focus(window, cx); let item_ = item.boxed_clone(); div() - .id(SharedString::from(format!( - "debugger_tab_{}", - item.item_id().as_u64() - ))) + .id(format!("debugger_tab_{}", item.item_id().as_u64())) .p_1() .rounded_md() .cursor_pointer() @@ -597,7 +604,7 @@ impl DebugTerminal { let focus_handle = cx.focus_handle(); let focus_subscription = cx.on_focus(&focus_handle, window, |this, window, cx| { if let Some(terminal) = this.terminal.as_ref() { - terminal.focus_handle(cx).focus(window); + terminal.focus_handle(cx).focus(window, cx); } }); @@ -653,6 +660,40 @@ impl RunningState { } } + pub(crate) fn contains_substring(config: &serde_json::Value, substring: &str) -> bool { + match config { + serde_json::Value::Object(obj) => obj + .values() + .any(|value| Self::contains_substring(value, substring)), + serde_json::Value::Array(array) => array + .iter() + .any(|value| Self::contains_substring(value, substring)), + serde_json::Value::String(s) => s.contains(substring), + _ => false, + } + } + + pub(crate) fn substitute_process_id_in_config(config: &mut serde_json::Value, process_id: i32) { + match config { + serde_json::Value::Object(obj) => { + obj.values_mut().for_each(|value| { + Self::substitute_process_id_in_config(value, process_id); + }); + } + serde_json::Value::Array(array) => { + array.iter_mut().for_each(|value| { + Self::substitute_process_id_in_config(value, process_id); + }); + } + serde_json::Value::String(s) => { + if s.contains(PROCESS_ID_PLACEHOLDER.as_str()) { + *s = s.replace(PROCESS_ID_PLACEHOLDER.as_str(), &process_id.to_string()); + } + } + _ => {} + } + } + pub(crate) fn relativize_paths( key: Option<&str>, config: &mut serde_json::Value, @@ -955,6 +996,31 @@ impl RunningState { Self::relativize_paths(None, &mut config, &task_context); Self::substitute_variables_in_config(&mut config, &task_context); + if Self::contains_substring(&config, PROCESS_ID_PLACEHOLDER.as_str()) || label.as_ref().contains(PROCESS_ID_PLACEHOLDER.as_str()) { + let (tx, rx) = futures::channel::oneshot::channel::>(); + + let weak_workspace_clone = weak_workspace.clone(); + 
weak_workspace.update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + workspace.toggle_modal(window, cx, |window, cx| { + AttachModal::new( + ModalIntent::ResolveProcessId(Some(tx)), + weak_workspace_clone, + project, + true, + window, + cx, + ) + }); + }).ok(); + + let Some(process_id) = rx.await.ok().flatten() else { + bail!("No process selected with config that contains {}", PROCESS_ID_PLACEHOLDER.as_str()) + }; + + Self::substitute_process_id_in_config(&mut config, process_id); + } + let request_type = match dap_registry .adapter(&adapter) .with_context(|| format!("{}: is not a valid adapter name", &adapter)) { @@ -1396,7 +1462,7 @@ impl RunningState { this.serialize_layout(window, cx); match event { Event::Remove { .. } => { - let _did_find_pane = this.panes.remove(source_pane).is_ok(); + let _did_find_pane = this.panes.remove(source_pane, cx).is_ok(); debug_assert!(_did_find_pane); cx.notify(); } @@ -1674,7 +1740,7 @@ impl RunningState { let is_building = self.session.update(cx, |session, cx| { session.shutdown(cx).detach(); - matches!(session.mode, session::SessionState::Booting(_)) + matches!(session.state, session::SessionState::Booting(_)) }); if is_building { @@ -1823,9 +1889,9 @@ impl RunningState { Member::Axis(group_root) } - pub(crate) fn invert_axies(&mut self) { + pub(crate) fn invert_axies(&mut self, cx: &mut App) { self.dock_axis = self.dock_axis.invert(); - self.panes.invert_axies(); + self.panes.invert_axies(cx); } } diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index 0a02a5a8e4197bf6b959a592b6e3d3da92c00846..f154757429a2bbfe153ee40c2c513dd06f05aa03 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -310,7 +310,7 @@ impl BreakpointList { fn dismiss(&mut self, _: &menu::Cancel, window: &mut Window, cx: &mut Context) { if self.input.focus_handle(cx).contains_focused(window, cx) { - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } else if self.strip_mode.is_some() { self.strip_mode.take(); cx.notify(); @@ -364,9 +364,9 @@ impl BreakpointList { } } } - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } else { - handle.focus(window); + handle.focus(window, cx); } return; @@ -575,7 +575,7 @@ impl BreakpointList { ) .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained) .with_width_from_item(self.max_width_index) - .track_scroll(self.scroll_handle.clone()) + .track_scroll(&self.scroll_handle) .flex_1() } @@ -627,7 +627,7 @@ impl BreakpointList { .on_click({ let focus_handle = focus_handle.clone(); move |_, window, cx| { - focus_handle.focus(window); + focus_handle.focus(window, cx); window.dispatch_action(ToggleEnableBreakpoint.boxed_clone(), cx) } }), @@ -654,7 +654,7 @@ impl BreakpointList { ) .on_click({ move |_, window, cx| { - focus_handle.focus(window); + focus_handle.focus(window, cx); window.dispatch_action(UnsetBreakpoint.boxed_clone(), cx) } }), @@ -776,7 +776,7 @@ impl Render for BreakpointList { .child(self.render_list(cx)) .custom_scrollbars( ui::Scrollbars::new(ScrollAxes::Both) - .tracked_scroll_handle(self.scroll_handle.clone()) + .tracked_scroll_handle(&self.scroll_handle) .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background) .tracked_entity(cx.entity_id()), window, @@ -1407,7 +1407,6 @@ impl RenderOnce for BreakpointOptionsStrip { h_flex() .gap_px() - .mr_3() // 
Space to avoid overlapping with the scrollbar .justify_end() .when(has_logs || self.is_selected, |this| { this.child( diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index e157d832b440b8016f152c88b376a9418ee3c843..040953bff6e8f0efa6045c1629c964ac98929547 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -8,7 +8,7 @@ use collections::HashMap; use dap::{CompletionItem, CompletionItemType, OutputEvent}; use editor::{ Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId, - SizingBehavior, + MultiBufferOffset, SizingBehavior, }; use fuzzy::StringMatchCandidate; use gpui::{ @@ -18,14 +18,14 @@ use gpui::{ use language::{Anchor, Buffer, CharScopeContext, CodeLabel, TextBufferSnapshot, ToOffset}; use menu::{Confirm, SelectNext, SelectPrevious}; use project::{ - Completion, CompletionDisplayOptions, CompletionResponse, + CompletionDisplayOptions, CompletionResponse, debugger::session::{CompletionsQuery, OutputToken, Session}, lsp_store::CompletionDocumentation, search_history::{SearchHistory, SearchHistoryCursor}, }; use settings::Settings; use std::fmt::Write; -use std::{cell::RefCell, ops::Range, rc::Rc, usize}; +use std::{ops::Range, rc::Rc, usize}; use theme::{Theme, ThemeSettings}; use ui::{ContextMenu, Divider, PopoverMenu, SplitButton, Tooltip, prelude::*}; use util::ResultExt; @@ -105,7 +105,7 @@ impl Console { cx.subscribe(&stack_frame_list, Self::handle_stack_frame_list_events), cx.on_focus(&focus_handle, window, |console, window, cx| { if console.is_running(cx) { - console.query_bar.focus_handle(cx).focus(window); + console.query_bar.focus_handle(cx).focus(window, cx); } }), ]; @@ -161,7 +161,9 @@ impl Console { ) -> Task> { self.console.update(cx, |_, cx| { cx.spawn_in(window, async move |console, cx| { - let mut len = console.update(cx, |this, cx| this.buffer().read(cx).len(cx))?; + let mut len = console + .update(cx, |this, cx| this.buffer().read(cx).len(cx))? 
+ .0; let (output, spans, background_spans) = cx .background_spawn(async move { let mut all_spans = Vec::new(); @@ -227,8 +229,8 @@ impl Console { for (range, color) in spans { let Some(color) = color else { continue }; let start_offset = range.start; - let range = - buffer.anchor_after(range.start)..buffer.anchor_before(range.end); + let range = buffer.anchor_after(MultiBufferOffset(range.start)) + ..buffer.anchor_before(MultiBufferOffset(range.end)); let style = HighlightStyle { color: Some(terminal_view::terminal_element::convert_color( &color, @@ -240,6 +242,7 @@ impl Console { start_offset, vec![range], style, + false, cx, ); } @@ -247,12 +250,13 @@ impl Console { for (range, color) in background_spans { let Some(color) = color else { continue }; let start_offset = range.start; - let range = - buffer.anchor_after(range.start)..buffer.anchor_before(range.end); + let range = buffer.anchor_after(MultiBufferOffset(range.start)) + ..buffer.anchor_before(MultiBufferOffset(range.end)); + let color_fn = color_fetcher(color); console.highlight_background_key::( start_offset, &[range], - color_fetcher(color), + move |_, theme| color_fn(theme), cx, ); } @@ -550,24 +554,12 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider { } } - fn apply_additional_edits_for_completion( - &self, - _buffer: Entity, - _completions: Rc>>, - _completion_index: usize, - _push_to_history: bool, - _cx: &mut Context, - ) -> gpui::Task>> { - Task::ready(Ok(None)) - } - fn is_completion_trigger( &self, buffer: &Entity, position: language::Anchor, text: &str, trigger_in_words: bool, - menu_is_open: bool, cx: &mut Context, ) -> bool { let mut chars = text.chars(); @@ -578,9 +570,6 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider { }; let snapshot = buffer.read(cx).snapshot(); - if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input { - return false; - } let classifier = snapshot .char_classifier_at(position) @@ -677,6 +666,8 @@ impl ConsoleQueryBarCompletionProvider { ), new_text: string_match.string.clone(), label: CodeLabel::plain(string_match.string.clone(), None), + match_start: None, + snippet_deduplication_key: None, icon_path: None, documentation: Some(CompletionDocumentation::MultiLineMarkdown( variable_value.into(), @@ -790,6 +781,8 @@ impl ConsoleQueryBarCompletionProvider { documentation: completion.detail.map(|detail| { CompletionDocumentation::MultiLineMarkdown(detail.into()) }), + match_start: None, + snippet_deduplication_key: None, confirm: None, source: project::CompletionSource::Dap { sort_text }, insert_text_mode: None, @@ -957,7 +950,7 @@ fn color_fetcher(color: ansi::Color) -> fn(&Theme) -> Hsla { mod tests { use super::*; use crate::tests::init_test; - use editor::test::editor_test_context::EditorTestContext; + use editor::{MultiBufferOffset, test::editor_test_context::EditorTestContext}; use gpui::TestAppContext; use language::Point; @@ -989,8 +982,8 @@ mod tests { cx.update_editor(|editor, _, cx| { editor.edit( vec![( - snapshot.offset_for_anchor(&replace_range.start) - ..snapshot.offset_for_anchor(&replace_range.end), + MultiBufferOffset(snapshot.offset_for_anchor(&replace_range.start)) + ..MultiBufferOffset(snapshot.offset_for_anchor(&replace_range.end)), replacement, )], cx, diff --git a/crates/debugger_ui/src/session/running/loaded_source_list.rs b/crates/debugger_ui/src/session/running/loaded_source_list.rs index 921ebd8b5f5bdfe8a3c8a8f7bb1625bd1ffad7fb..e55fad336b5ee6dfbee1cb0c90ea3d19f561a2ba 100644 --- 
a/crates/debugger_ui/src/session/running/loaded_source_list.rs +++ b/crates/debugger_ui/src/session/running/loaded_source_list.rs @@ -17,7 +17,9 @@ impl LoadedSourceList { let list = ListState::new(0, gpui::ListAlignment::Top, px(1000.)); let _subscription = cx.subscribe(&session, |this, _, event, cx| match event { - SessionEvent::Stopped(_) | SessionEvent::LoadedSources => { + SessionEvent::Stopped(_) + | SessionEvent::HistoricSnapshotSelected + | SessionEvent::LoadedSources => { this.invalidate = true; cx.notify(); } diff --git a/crates/debugger_ui/src/session/running/memory_view.rs b/crates/debugger_ui/src/session/running/memory_view.rs index 8670beb0f5f93f68a6052b868a866e22b82c92fd..f10e5179e37f87be0e27985b557fcb63cf089a42 100644 --- a/crates/debugger_ui/src/session/running/memory_view.rs +++ b/crates/debugger_ui/src/session/running/memory_view.rs @@ -229,7 +229,7 @@ impl MemoryView { rows }, ) - .track_scroll(view_state.scroll_handle) + .track_scroll(&view_state.scroll_handle) .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) .on_scroll_wheel(cx.listener(|this, evt: &ScrollWheelEvent, window, _| { let mut view_state = this.view_state(); @@ -403,7 +403,7 @@ impl MemoryView { this.set_placeholder_text("Write to Selected Memory Range", window, cx); }); self.is_writing_memory = true; - self.query_editor.focus_handle(cx).focus(window); + self.query_editor.focus_handle(cx).focus(window, cx); } else { self.query_editor.update(cx, |this, cx| { this.clear(window, cx); @@ -921,7 +921,7 @@ impl Render for MemoryView { })) .custom_scrollbars( ui::Scrollbars::new(ui::ScrollAxes::Both) - .tracked_scroll_handle(self.view_state_handle.clone()) + .tracked_scroll_handle(&self.view_state_handle) .with_track_along( ui::ScrollAxes::Both, cx.theme().colors().panel_background, diff --git a/crates/debugger_ui/src/session/running/module_list.rs b/crates/debugger_ui/src/session/running/module_list.rs index 545d8392745c636b805cfc1e0743170635ef8abe..7d0228fc6851185d10a3a237257d6244d5a90c76 100644 --- a/crates/debugger_ui/src/session/running/module_list.rs +++ b/crates/debugger_ui/src/session/running/module_list.rs @@ -32,7 +32,9 @@ impl ModuleList { let focus_handle = cx.focus_handle(); let _subscription = cx.subscribe(&session, |this, _, event, cx| match event { - SessionEvent::Stopped(_) | SessionEvent::Modules => { + SessionEvent::Stopped(_) + | SessionEvent::HistoricSnapshotSelected + | SessionEvent::Modules => { if this._rebuild_task.is_some() { this.schedule_rebuild(cx); } @@ -253,7 +255,7 @@ impl ModuleList { range.map(|ix| this.render_entry(ix, cx)).collect() }), ) - .track_scroll(self.scroll_handle.clone()) + .track_scroll(&self.scroll_handle) .size_full() } } @@ -279,6 +281,6 @@ impl Render for ModuleList { .size_full() .p_1() .child(self.render_list(window, cx)) - .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) } } diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index a8fabd327a3de630ff884899fe7af1167932618c..4dffb57a792cb5a5ed6bcc8003a8fa6f3b9af9de 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -4,6 +4,7 @@ use std::time::Duration; use anyhow::{Context as _, Result, anyhow}; use dap::StackFrameId; +use dap::adapters::DebugAdapterName; use db::kvp::KEY_VALUE_STORE; use gpui::{ Action, AnyElement, Entity, EventEmitter, 
FocusHandle, Focusable, FontWeight, ListState, @@ -20,7 +21,7 @@ use project::debugger::breakpoint_store::ActiveStackFrame; use project::debugger::session::{Session, SessionEvent, StackFrame, ThreadStatus}; use project::{ProjectItem, ProjectPath}; use ui::{Tooltip, WithScrollbar, prelude::*}; -use workspace::{ItemHandle, Workspace}; +use workspace::{ItemHandle, Workspace, WorkspaceId}; use super::RunningState; @@ -58,6 +59,14 @@ impl From for String { } } +pub(crate) fn stack_frame_filter_key( + adapter_name: &DebugAdapterName, + workspace_id: WorkspaceId, +) -> String { + let database_id: i64 = workspace_id.into(); + format!("stack-frame-list-filter-{}-{}", adapter_name.0, database_id) +} + pub struct StackFrameList { focus_handle: FocusHandle, _subscription: Subscription, @@ -97,7 +106,9 @@ impl StackFrameList { SessionEvent::Threads => { this.schedule_refresh(false, window, cx); } - SessionEvent::Stopped(..) | SessionEvent::StackTrace => { + SessionEvent::Stopped(..) + | SessionEvent::StackTrace + | SessionEvent::HistoricSnapshotSelected => { this.schedule_refresh(true, window, cx); } _ => {} @@ -105,14 +116,18 @@ impl StackFrameList { let list_state = ListState::new(0, gpui::ListAlignment::Top, px(1000.)); - let list_filter = KEY_VALUE_STORE - .read_kvp(&format!( - "stack-frame-list-filter-{}", - session.read(cx).adapter().0 - )) + let list_filter = workspace + .read_with(cx, |workspace, _| workspace.database_id()) .ok() .flatten() - .map(StackFrameFilter::from_str_or_default) + .and_then(|database_id| { + let key = stack_frame_filter_key(&session.read(cx).adapter(), database_id); + KEY_VALUE_STORE + .read_kvp(&key) + .ok() + .flatten() + .map(StackFrameFilter::from_str_or_default) + }) .unwrap_or(StackFrameFilter::All); let mut this = Self { @@ -225,7 +240,6 @@ impl StackFrameList { } this.update_in(cx, |this, window, cx| { this.build_entries(select_first, window, cx); - cx.notify(); }) .ok(); }) @@ -806,15 +820,8 @@ impl StackFrameList { .ok() .flatten() { - let database_id: i64 = database_id.into(); - let save_task = KEY_VALUE_STORE.write_kvp( - format!( - "stack-frame-list-filter-{}-{}", - self.session.read(cx).adapter().0, - database_id, - ), - self.list_filter.into(), - ); + let key = stack_frame_filter_key(&self.session.read(cx).adapter(), database_id); + let save_task = KEY_VALUE_STORE.write_kvp(key, self.list_filter.into()); cx.background_spawn(save_task).detach(); } @@ -913,7 +920,7 @@ impl Render for StackFrameList { ) }) .child(self.render_list(window, cx)) - .vertical_scrollbar_for(self.list_state.clone(), window, cx) + .vertical_scrollbar_for(&self.list_state, window, cx) } } diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index 7d736aace58ab1b27ccab5690cf24d4cff9a47f6..8329a6baf04061cc33e8130a4e6b3a33b35267b6 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -217,6 +217,12 @@ impl VariableList { let _subscriptions = vec![ cx.subscribe(&stack_frame_list, Self::handle_stack_frame_list_events), cx.subscribe(&session, |this, _, event, cx| match event { + SessionEvent::HistoricSnapshotSelected => { + this.selection.take(); + this.edited_path.take(); + this.selected_stack_frame_id.take(); + this.build_entries(cx); + } SessionEvent::Stopped(_) => { this.selection.take(); this.edited_path.take(); @@ -225,7 +231,6 @@ impl VariableList { SessionEvent::Variables | SessionEvent::Watchers => { this.build_entries(cx); } - _ 
=> {} }), cx.on_focus_out(&focus_handle, window, |this, _, _, cx| { @@ -524,7 +529,7 @@ impl VariableList { fn cancel(&mut self, _: &menu::Cancel, window: &mut Window, cx: &mut Context) { self.edited_path.take(); - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); cx.notify(); } @@ -1062,7 +1067,7 @@ impl VariableList { editor.select_all(&editor::actions::SelectAll, window, cx); editor }); - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); editor } @@ -1557,7 +1562,7 @@ impl Render for VariableList { this.render_entries(range, window, cx) }), ) - .track_scroll(self.list_handle.clone()) + .track_scroll(&self.list_handle) .with_width_from_item(self.max_width_index) .with_sizing_behavior(gpui::ListSizingBehavior::Auto) .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained) @@ -1574,10 +1579,10 @@ impl Render for VariableList { ) .with_priority(1) })) - // .vertical_scrollbar_for(self.list_handle.clone(), window, cx) + // .vertical_scrollbar_for(&self.list_handle, window, cx) .custom_scrollbars( ui::Scrollbars::new(ScrollAxes::Both) - .tracked_scroll_handle(self.list_handle.clone()) + .tracked_scroll_handle(&self.list_handle) .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background) .tracked_entity(cx.entity_id()), window, diff --git a/crates/debugger_ui/src/stack_trace_view.rs b/crates/debugger_ui/src/stack_trace_view.rs index 07caabaacaf00d2752a04c5ba68be07a5678c40a..70b88d203e4ff8017127eee2ad6ff0a81df74c69 100644 --- a/crates/debugger_ui/src/stack_trace_view.rs +++ b/crates/debugger_ui/src/stack_trace_view.rs @@ -7,7 +7,7 @@ use editor::{ RowHighlightOptions, SelectionEffects, ToPoint, scroll::Autoscroll, }; use gpui::{ - AnyView, App, AppContext, Entity, EventEmitter, Focusable, IntoElement, Render, SharedString, + App, AppContext, Entity, EventEmitter, Focusable, IntoElement, Render, SharedString, Subscription, Task, WeakEntity, Window, }; use language::{BufferSnapshot, Capability, Point, Selection, SelectionGoal, TreeSitterOptions}; @@ -418,17 +418,17 @@ impl Item for StackTraceView { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } diff --git a/crates/debugger_ui/src/tests/attach_modal.rs b/crates/debugger_ui/src/tests/attach_modal.rs index 80e2b73d5a100bbd21462f0ad80def1997e184de..4df3ebf5196dea266287041e51dd65363d5f685c 100644 --- a/crates/debugger_ui/src/tests/attach_modal.rs +++ b/crates/debugger_ui/src/tests/attach_modal.rs @@ -1,4 +1,8 @@ -use crate::{attach_modal::Candidate, tests::start_debug_session_with, *}; +use crate::{ + attach_modal::{Candidate, ModalIntent}, + tests::start_debug_session_with, + *, +}; use attach_modal::AttachModal; use dap::{FakeAdapter, adapters::DebugTaskDefinition}; use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext}; @@ -98,12 +102,6 @@ async fn test_show_attach_modal_and_select_process( workspace.toggle_modal(window, cx, |window, cx| { AttachModal::with_processes( workspace_handle, - task::ZedDebugConfig { - adapter: FakeAdapter::ADAPTER_NAME.into(), - request: dap::DebugRequest::Attach(AttachRequest::default()), - label: "attach 
example".into(), - stop_on_entry: None, - }, vec![ Candidate { pid: 0, @@ -124,6 +122,12 @@ async fn test_show_attach_modal_and_select_process( .into_iter() .collect(), true, + ModalIntent::AttachToProcess(task::ZedDebugConfig { + adapter: FakeAdapter::ADAPTER_NAME.into(), + request: dap::DebugRequest::Attach(AttachRequest::default()), + label: "attach example".into(), + stop_on_entry: None, + }), window, cx, ) @@ -138,8 +142,7 @@ async fn test_show_attach_modal_and_select_process( // assert we got the expected processes workspace .update(cx, |_, window, cx| { - let names = - attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx)); + let names = attach_modal.update(cx, |modal, cx| attach_modal::process_names(modal, cx)); // Initially all processes are visible. assert_eq!(3, names.len()); attach_modal.update(cx, |this, cx| { @@ -153,8 +156,7 @@ async fn test_show_attach_modal_and_select_process( // assert we got the expected processes workspace .update(cx, |_, _, cx| { - let names = - attach_modal.update(cx, |modal, cx| attach_modal::_process_names(modal, cx)); + let names = attach_modal.update(cx, |modal, cx| attach_modal::process_names(modal, cx)); // Initially all processes are visible. assert_eq!(2, names.len()); }) @@ -171,3 +173,139 @@ async fn test_show_attach_modal_and_select_process( }) .unwrap(); } + +#[gpui::test] +async fn test_attach_with_pick_pid_variable(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(executor.clone()); + + fs.insert_tree( + path!("/project"), + json!({ + "main.rs": "First line\nSecond line\nThird line\nFourth line", + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + let workspace = init_test_workspace(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let _initialize_subscription = + project::debugger::test::intercept_debug_sessions(cx, |client| { + client.on_request::(move |_, args| { + let raw = &args.raw; + assert_eq!(raw["request"], "attach"); + assert_eq!( + raw["process_id"], "42", + "verify process id has been replaced" + ); + + Ok(()) + }); + }); + + let pick_pid_placeholder = task::VariableName::PickProcessId.template_value(); + workspace + .update(cx, |workspace, window, cx| { + workspace.start_debug_session( + DebugTaskDefinition { + adapter: FakeAdapter::ADAPTER_NAME.into(), + label: "attach with picker".into(), + config: json!({ + "request": "attach", + "process_id": pick_pid_placeholder, + }), + tcp_connection: None, + } + .to_scenario(), + task::TaskContext::default(), + None, + None, + window, + cx, + ) + }) + .unwrap(); + + cx.run_until_parked(); + + let attach_modal = workspace + .update(cx, |workspace, _window, cx| { + workspace.active_modal::(cx) + }) + .unwrap(); + + assert!( + attach_modal.is_some(), + "Attach modal should open when config contains ZED_PICK_PID" + ); + + let attach_modal = attach_modal.unwrap(); + + workspace + .update(cx, |_, window, cx| { + attach_modal.update(cx, |modal, cx| { + attach_modal::set_candidates( + modal, + vec![ + Candidate { + pid: 10, + name: "process-1".into(), + command: vec![], + }, + Candidate { + pid: 42, + name: "target-process".into(), + command: vec![], + }, + Candidate { + pid: 99, + name: "process-3".into(), + command: vec![], + }, + ] + .into_iter() + .collect(), + window, + cx, + ) + }) + }) + .unwrap(); + + cx.run_until_parked(); + + workspace + .update(cx, |_, window, cx| { + attach_modal.update(cx, |modal, cx| { + 
modal.picker.update(cx, |picker, cx| { + picker.set_query("target", window, cx); + }) + }) + }) + .unwrap(); + + cx.run_until_parked(); + + workspace + .update(cx, |_, _, cx| { + let names = attach_modal.update(cx, |modal, cx| attach_modal::process_names(modal, cx)); + assert_eq!(names.len(), 1); + assert_eq!(names[0], " 42 target-process"); + }) + .unwrap(); + + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + workspace + .update(cx, |workspace, _window, cx| { + assert!( + workspace.active_modal::(cx).is_none(), + "Attach modal should be dismissed after selection" + ); + }) + .unwrap(); +} diff --git a/crates/debugger_ui/src/tests/inline_values.rs b/crates/debugger_ui/src/tests/inline_values.rs index 801e6d43623b50d69ea3ce297c274c2d7e5a8b14..379bc4c98f5341b089b5936ed8571da5a6280723 100644 --- a/crates/debugger_ui/src/tests/inline_values.rs +++ b/crates/debugger_ui/src/tests/inline_values.rs @@ -4,7 +4,7 @@ use dap::{Scope, StackFrame, Variable, requests::Variables}; use editor::{Editor, EditorMode, MultiBuffer}; use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext}; use language::{ - Language, LanguageConfig, LanguageMatcher, tree_sitter_python, tree_sitter_rust, + Language, LanguageConfig, LanguageMatcher, rust_lang, tree_sitter_python, tree_sitter_typescript, }; use project::{FakeFs, Project}; @@ -224,7 +224,7 @@ fn main() { .unwrap(); buffer.update(cx, |buffer, cx| { - buffer.set_language(Some(Arc::new(rust_lang())), cx); + buffer.set_language(Some(rust_lang()), cx); }); let (editor, cx) = cx.add_window_view(|window, cx| { @@ -1521,23 +1521,6 @@ fn main() { }); } -fn rust_lang() -> Language { - let debug_variables_query = include_str!("../../../languages/src/rust/debugger.scm"); - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_debug_variables_query(debug_variables_query) - .unwrap() -} - #[gpui::test] async fn test_python_inline_values(executor: BackgroundExecutor, cx: &mut TestAppContext) { init_test(cx); @@ -1859,21 +1842,23 @@ fn python_lang() -> Language { .unwrap() } -fn go_lang() -> Language { +fn go_lang() -> Arc { let debug_variables_query = include_str!("../../../languages/src/go/debugger.scm"); - Language::new( - LanguageConfig { - name: "Go".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["go".to_string()], + Arc::new( + Language::new( + LanguageConfig { + name: "Go".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["go".to_string()], + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, - Some(tree_sitter_go::LANGUAGE.into()), + Some(tree_sitter_go::LANGUAGE.into()), + ) + .with_debug_variables_query(debug_variables_query) + .unwrap(), ) - .with_debug_variables_query(debug_variables_query) - .unwrap() } /// Test utility function for inline values testing @@ -1891,7 +1876,7 @@ async fn test_inline_values_util( before: &str, after: &str, active_debug_line: Option, - language: Language, + language: Arc, executor: BackgroundExecutor, cx: &mut TestAppContext, ) { @@ -2091,7 +2076,7 @@ async fn test_inline_values_util( .unwrap(); buffer.update(cx, |buffer, cx| { - buffer.set_language(Some(Arc::new(language)), cx); + buffer.set_language(Some(language), cx); }); let (editor, cx) = cx.add_window_view(|window, cx| { @@ -2276,55 +2261,61 @@ fn main() { .await; } -fn javascript_lang() -> Language { +fn javascript_lang() -> Arc { let 
debug_variables_query = include_str!("../../../languages/src/javascript/debugger.scm"); - Language::new( - LanguageConfig { - name: "JavaScript".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["js".to_string()], + Arc::new( + Language::new( + LanguageConfig { + name: "JavaScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["js".to_string()], + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, - Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + ) + .with_debug_variables_query(debug_variables_query) + .unwrap(), ) - .with_debug_variables_query(debug_variables_query) - .unwrap() } -fn typescript_lang() -> Language { +fn typescript_lang() -> Arc { let debug_variables_query = include_str!("../../../languages/src/typescript/debugger.scm"); - Language::new( - LanguageConfig { - name: "TypeScript".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["ts".to_string()], + Arc::new( + Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, - Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + ) + .with_debug_variables_query(debug_variables_query) + .unwrap(), ) - .with_debug_variables_query(debug_variables_query) - .unwrap() } -fn tsx_lang() -> Language { +fn tsx_lang() -> Arc { let debug_variables_query = include_str!("../../../languages/src/tsx/debugger.scm"); - Language::new( - LanguageConfig { - name: "TSX".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["tsx".to_string()], + Arc::new( + Language::new( + LanguageConfig { + name: "TSX".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["tsx".to_string()], + ..Default::default() + }, ..Default::default() }, - ..Default::default() - }, - Some(tree_sitter_typescript::LANGUAGE_TSX.into()), + Some(tree_sitter_typescript::LANGUAGE_TSX.into()), + ) + .with_debug_variables_query(debug_variables_query) + .unwrap(), ) - .with_debug_variables_query(debug_variables_query) - .unwrap() } #[gpui::test] diff --git a/crates/debugger_ui/src/tests/stack_frame_list.rs b/crates/debugger_ui/src/tests/stack_frame_list.rs index 05e638e2321bb6fcb4504a8bc8c81123f1b09a33..445d5a01d9f062501c889a9d5aa920de6f037914 100644 --- a/crates/debugger_ui/src/tests/stack_frame_list.rs +++ b/crates/debugger_ui/src/tests/stack_frame_list.rs @@ -1,12 +1,15 @@ use crate::{ debugger_panel::DebugPanel, - session::running::stack_frame_list::{StackFrameEntry, StackFrameFilter}, + session::running::stack_frame_list::{ + StackFrameEntry, StackFrameFilter, stack_frame_filter_key, + }, tests::{active_debug_session_panel, init_test, init_test_workspace, start_debug_session}, }; use dap::{ StackFrame, requests::{Scopes, StackTrace, Threads}, }; +use db::kvp::KEY_VALUE_STORE; use editor::{Editor, ToPoint as _}; use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext}; use project::{FakeFs, Project}; @@ -1085,3 +1088,180 @@ async fn test_stack_frame_filter(executor: BackgroundExecutor, cx: &mut TestAppC ); }); } + +#[gpui::test] +async fn test_stack_frame_filter_persistence( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(executor.clone()); + + fs.insert_tree( + path!("/project"), + json!({ + "src": { + "test.js": "function main() { console.log('hello'); }", + } + }), 
+ ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let workspace = init_test_workspace(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + workspace + .update(cx, |workspace, _, _| { + workspace.set_random_database_id(); + }) + .unwrap(); + + let threads_response = dap::ThreadsResponse { + threads: vec![dap::Thread { + id: 1, + name: "Thread 1".into(), + }], + }; + + let stack_trace_response = dap::StackTraceResponse { + stack_frames: vec![StackFrame { + id: 1, + name: "main".into(), + source: Some(dap::Source { + name: Some("test.js".into()), + path: Some(path!("/project/src/test.js").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: 1, + column: 1, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: None, + }], + total_frames: None, + }; + + let stopped_event = dap::StoppedEvent { + reason: dap::StoppedEventReason::Pause, + description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + }; + + let session = start_debug_session(&workspace, cx, |_| {}).unwrap(); + let client = session.update(cx, |session, _| session.adapter_client().unwrap()); + let adapter_name = session.update(cx, |session, _| session.adapter()); + + client.on_request::({ + let threads_response = threads_response.clone(); + move |_, _| Ok(threads_response.clone()) + }); + + client.on_request::(move |_, _| Ok(dap::ScopesResponse { scopes: vec![] })); + + client.on_request::({ + let stack_trace_response = stack_trace_response.clone(); + move |_, _| Ok(stack_trace_response.clone()) + }); + + client + .fake_event(dap::messages::Events::Stopped(stopped_event.clone())) + .await; + + cx.run_until_parked(); + + let stack_frame_list = + active_debug_session_panel(workspace, cx).update(cx, |debug_panel_item, cx| { + debug_panel_item + .running_state() + .update(cx, |state, _| state.stack_frame_list().clone()) + }); + + stack_frame_list.update(cx, |stack_frame_list, _cx| { + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::All, + "Initial filter should be All" + ); + }); + + stack_frame_list.update(cx, |stack_frame_list, cx| { + stack_frame_list + .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::OnlyUserFrames, + "Filter should be OnlyUserFrames after toggle" + ); + }); + + cx.run_until_parked(); + + let workspace_id = workspace + .update(cx, |workspace, _window, _cx| workspace.database_id()) + .ok() + .flatten() + .expect("workspace id has to be some for this test to work properly"); + + let key = stack_frame_filter_key(&adapter_name, workspace_id); + let stored_value = KEY_VALUE_STORE.read_kvp(&key).unwrap(); + assert_eq!( + stored_value, + Some(StackFrameFilter::OnlyUserFrames.into()), + "Filter should be persisted in KVP store with key: {}", + key + ); + + client + .fake_event(dap::messages::Events::Terminated(None)) + .await; + cx.run_until_parked(); + + let session2 = start_debug_session(&workspace, cx, |_| {}).unwrap(); + let client2 = session2.update(cx, |session, _| session.adapter_client().unwrap()); + + client2.on_request::({ + let threads_response = threads_response.clone(); + move |_, _| Ok(threads_response.clone()) + }); + + client2.on_request::(move |_, _| 
Ok(dap::ScopesResponse { scopes: vec![] })); + + client2.on_request::({ + let stack_trace_response = stack_trace_response.clone(); + move |_, _| Ok(stack_trace_response.clone()) + }); + + client2 + .fake_event(dap::messages::Events::Stopped(stopped_event.clone())) + .await; + + cx.run_until_parked(); + + let stack_frame_list2 = + active_debug_session_panel(workspace, cx).update(cx, |debug_panel_item, cx| { + debug_panel_item + .running_state() + .update(cx, |state, _| state.stack_frame_list().clone()) + }); + + stack_frame_list2.update(cx, |stack_frame_list, _cx| { + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::OnlyUserFrames, + "Filter should be restored from KVP store in new session" + ); + }); +} diff --git a/crates/deepseek/src/deepseek.rs b/crates/deepseek/src/deepseek.rs index 64a1cbe5d96354260c2bf84a43ed70be7336aa7a..e978aa08048bfa4c7b7b203ce6b405ba8a0a7d0c 100644 --- a/crates/deepseek/src/deepseek.rs +++ b/crates/deepseek/src/deepseek.rs @@ -155,6 +155,8 @@ pub enum RequestMessage { content: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] tool_calls: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + reasoning_content: Option, }, User { content: String, diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 01626ddfd2a3f1a4773b2e88a9b8ff001b46680a..ba10f6fbdabf05a095a7fed7c6ae682d4dc177c7 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -6,7 +6,7 @@ use crate::{ use anyhow::Result; use collections::HashMap; use editor::{ - Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, + Editor, EditorEvent, EditorSettings, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, multibuffer_context_lines, }; @@ -175,7 +175,7 @@ impl BufferDiagnosticsEditor { // `BufferDiagnosticsEditor` instance. EditorEvent::Focused => { if buffer_diagnostics_editor.multibuffer.read(cx).is_empty() { - window.focus(&buffer_diagnostics_editor.focus_handle); + window.focus(&buffer_diagnostics_editor.focus_handle, cx); } } EditorEvent::Blurred => { @@ -370,11 +370,16 @@ impl BufferDiagnosticsEditor { continue; } + let languages = buffer_diagnostics_editor + .read_with(cx, |b, cx| b.project.read(cx).languages().clone()) + .ok(); + let diagnostic_blocks = cx.update(|_window, cx| { DiagnosticRenderer::diagnostic_blocks_for_group( group, buffer_snapshot.remote_id(), Some(Arc::new(buffer_diagnostics_editor.clone())), + languages, cx, ) })?; @@ -512,7 +517,7 @@ impl BufferDiagnosticsEditor { .editor .read(cx) .focus_handle(cx) - .focus(window); + .focus(window, cx); } } } @@ -612,7 +617,7 @@ impl BufferDiagnosticsEditor { // not empty, focus on the editor instead, which will allow the user to // start interacting and editing the buffer's contents. 
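The persistence test above exercises the key scheme introduced by `stack_frame_filter_key` earlier in this diff: the saved filter is namespaced by debug adapter name and by the workspace's database id, so different workspaces keep independent settings. A minimal standalone sketch of that scheme, using plain `&str`/`i64` stand-ins for `DebugAdapterName` and `WorkspaceId`:

```rust
fn stack_frame_filter_key(adapter_name: &str, workspace_database_id: i64) -> String {
    // Mirrors the format string used by the patch:
    // "stack-frame-list-filter-{adapter}-{workspace db id}".
    format!(
        "stack-frame-list-filter-{}-{}",
        adapter_name, workspace_database_id
    )
}

#[test]
fn filter_key_is_scoped_per_adapter_and_workspace() {
    assert_eq!(
        stack_frame_filter_key("fake-adapter", 7),
        "stack-frame-list-filter-fake-adapter-7"
    );
    // The same adapter in a different workspace gets a different key,
    // which is what lets the filter round-trip through the KVP store
    // without one workspace clobbering another.
    assert_ne!(
        stack_frame_filter_key("fake-adapter", 7),
        stack_frame_filter_key("fake-adapter", 8)
    );
}
```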
if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() { - self.editor.focus_handle(cx).focus(window) + self.editor.focus_handle(cx).focus(window, cx) } } @@ -675,11 +680,11 @@ impl Item for BufferDiagnosticsEditor { type_id: std::any::TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } @@ -696,8 +701,12 @@ impl Item for BufferDiagnosticsEditor { }); } - fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { - ToolbarItemLocation::PrimaryLeft + fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation { + if EditorSettings::get_global(cx).toolbar.breadcrumbs { + ToolbarItemLocation::PrimaryLeft + } else { + ToolbarItemLocation::Hidden + } } fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { diff --git a/crates/diagnostics/src/diagnostic_renderer.rs b/crates/diagnostics/src/diagnostic_renderer.rs index 6204bf4b52ddb903773beac28627d53c3cce7765..521752ff1959fccc12b74857e342ff33a0444f3f 100644 --- a/crates/diagnostics/src/diagnostic_renderer.rs +++ b/crates/diagnostics/src/diagnostic_renderer.rs @@ -6,7 +6,7 @@ use editor::{ hover_popover::diagnostics_markdown_style, }; use gpui::{AppContext, Entity, Focusable, WeakEntity}; -use language::{BufferId, Diagnostic, DiagnosticEntryRef}; +use language::{BufferId, Diagnostic, DiagnosticEntryRef, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownElement}; use settings::Settings; @@ -27,6 +27,7 @@ impl DiagnosticRenderer { diagnostic_group: Vec>, buffer_id: BufferId, diagnostics_editor: Option>, + language_registry: Option>, cx: &mut App, ) -> Vec { let Some(primary_ix) = diagnostic_group @@ -75,11 +76,14 @@ impl DiagnosticRenderer { )) } } + results.push(DiagnosticBlock { initial_range: primary.range.clone(), severity: primary.diagnostic.severity, diagnostics_editor: diagnostics_editor.clone(), - markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)), + markdown: cx.new(|cx| { + Markdown::new(markdown.into(), language_registry.clone(), None, cx) + }), }); } else { if entry.range.start.row.abs_diff(primary.range.start.row) >= 5 { @@ -91,7 +95,9 @@ impl DiagnosticRenderer { initial_range: entry.range.clone(), severity: entry.diagnostic.severity, diagnostics_editor: diagnostics_editor.clone(), - markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)), + markdown: cx.new(|cx| { + Markdown::new(markdown.into(), language_registry.clone(), None, cx) + }), }); } } @@ -118,9 +124,16 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer { buffer_id: BufferId, snapshot: EditorSnapshot, editor: WeakEntity, + language_registry: Option>, cx: &mut App, ) -> Vec> { - let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx); + let blocks = Self::diagnostic_blocks_for_group( + diagnostic_group, + buffer_id, + None, + language_registry, + cx, + ); blocks .into_iter() @@ -146,9 +159,16 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer { diagnostic_group: Vec>, range: Range, buffer_id: BufferId, + language_registry: Option>, cx: &mut App, ) -> Option> { - let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx); + let blocks = Self::diagnostic_blocks_for_group( + diagnostic_group, + buffer_id, + None, + language_registry, + cx, + ); blocks .into_iter() 
.find_map(|block| (block.initial_range == range).then(|| block.markdown)) @@ -206,6 +226,11 @@ impl DiagnosticBlock { self.markdown.clone(), diagnostics_markdown_style(bcx.window, cx), ) + .code_block_renderer(markdown::CodeBlockRenderer::Default { + copy_button: false, + copy_button_on_hover: false, + border: false, + }) .on_url_click({ move |link, window, cx| { editor @@ -259,7 +284,7 @@ impl DiagnosticBlock { if range.context.overlaps(&diagnostic.range, &snapshot) { Self::jump_to( editor, - Anchor::range_in_buffer(excerpt_id, buffer_id, diagnostic.range), + Anchor::range_in_buffer(excerpt_id, diagnostic.range), window, cx, ); @@ -290,6 +315,6 @@ impl DiagnosticBlock { editor.change_selections(Default::default(), window, cx, |s| { s.select_ranges([range.start..range.start]); }); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); } } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 2e729cbdf420264d96db6e6fec8317d250ec642c..d85eb07f68619e15bfe44d26282db3a3e49df4f3 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -12,12 +12,12 @@ use buffer_diagnostics::BufferDiagnosticsEditor; use collections::{BTreeSet, HashMap, HashSet}; use diagnostic_renderer::DiagnosticBlock; use editor::{ - Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, + Editor, EditorEvent, EditorSettings, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, multibuffer_context_lines, }; use gpui::{ - AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, FocusOutEvent, + AnyElement, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, FocusOutEvent, Focusable, Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, div, }; @@ -73,7 +73,7 @@ pub fn init(cx: &mut App) { } pub(crate) struct ProjectDiagnosticsEditor { - project: Entity, + pub project: Entity, workspace: WeakEntity, focus_handle: FocusHandle, editor: Entity, @@ -182,7 +182,6 @@ impl ProjectDiagnosticsEditor { project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { log::debug!("disk based diagnostics finished for server {language_server_id}"); this.close_diagnosticless_buffers( - window, cx, this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx), @@ -244,13 +243,13 @@ impl ProjectDiagnosticsEditor { match event { EditorEvent::Focused => { if this.multibuffer.read(cx).is_empty() { - window.focus(&this.focus_handle); + window.focus(&this.focus_handle, cx); } } - EditorEvent::Blurred => this.close_diagnosticless_buffers(window, cx, false), - EditorEvent::Saved => this.close_diagnosticless_buffers(window, cx, true), + EditorEvent::Blurred => this.close_diagnosticless_buffers(cx, false), + EditorEvent::Saved => this.close_diagnosticless_buffers(cx, true), EditorEvent::SelectionsChanged { .. 
} => { - this.close_diagnosticless_buffers(window, cx, true) + this.close_diagnosticless_buffers(cx, true) } _ => {} } @@ -298,12 +297,7 @@ impl ProjectDiagnosticsEditor { /// - have no diagnostics anymore /// - are saved (not dirty) /// - and, if `retain_selections` is true, do not have selections within them - fn close_diagnosticless_buffers( - &mut self, - _window: &mut Window, - cx: &mut Context, - retain_selections: bool, - ) { + fn close_diagnosticless_buffers(&mut self, cx: &mut Context, retain_selections: bool) { let snapshot = self .editor .update(cx, |editor, cx| editor.display_snapshot(cx)); @@ -314,7 +308,7 @@ impl ProjectDiagnosticsEditor { .selections .all_anchors(&snapshot) .iter() - .filter_map(|anchor| anchor.start.buffer_id) + .filter_map(|anchor| anchor.start.text_anchor.buffer_id) .collect::>() }); for buffer_id in buffer_ids { @@ -440,14 +434,14 @@ impl ProjectDiagnosticsEditor { fn focus_in(&mut self, window: &mut Window, cx: &mut Context) { if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() { - self.editor.focus_handle(cx).focus(window) + self.editor.focus_handle(cx).focus(window, cx) } } fn focus_out(&mut self, _: FocusOutEvent, window: &mut Window, cx: &mut Context) { if !self.focus_handle.is_focused(window) && !self.editor.focus_handle(cx).is_focused(window) { - self.close_diagnosticless_buffers(window, cx, false); + self.close_diagnosticless_buffers(cx, false); } } @@ -461,8 +455,7 @@ impl ProjectDiagnosticsEditor { }); } }); - self.multibuffer - .update(cx, |multibuffer, cx| multibuffer.clear(cx)); + self.close_diagnosticless_buffers(cx, false); self.project.update(cx, |project, cx| { self.paths_to_update = project .diagnostic_summaries(false, cx) @@ -498,7 +491,7 @@ impl ProjectDiagnosticsEditor { cx: &mut Context, ) -> Task> { let was_empty = self.multibuffer.read(cx).is_empty(); - let mut buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_snapshot = buffer.read(cx).snapshot(); let buffer_id = buffer_snapshot.remote_id(); let max_severity = if self.include_warnings { @@ -552,11 +545,15 @@ impl ProjectDiagnosticsEditor { if group_severity.is_none_or(|s| s > max_severity) { continue; } + let languages = this + .read_with(cx, |t, cx| t.project.read(cx).languages().clone()) + .ok(); let more = cx.update(|_, cx| { crate::diagnostic_renderer::DiagnosticRenderer::diagnostic_blocks_for_group( group, buffer_snapshot.remote_id(), Some(diagnostics_toolbar_editor.clone()), + languages, cx, ) })?; @@ -605,7 +602,6 @@ impl ProjectDiagnosticsEditor { cx, ) .await; - buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?; let initial_range = buffer_snapshot.anchor_after(b.initial_range.start) ..buffer_snapshot.anchor_before(b.initial_range.end); let excerpt_range = ExcerptRange { @@ -654,7 +650,7 @@ impl ProjectDiagnosticsEditor { }) }); if this.focus_handle.is_focused(window) { - this.editor.read(cx).focus_handle(cx).focus(window); + this.editor.read(cx).focus_handle(cx).focus(window, cx); } } @@ -884,22 +880,26 @@ impl Item for ProjectDiagnosticsEditor { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } - fn breadcrumb_location(&self, 
_: &App) -> ToolbarItemLocation { - ToolbarItemLocation::PrimaryLeft + fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation { + if EditorSettings::get_global(cx).toolbar.breadcrumbs { + ToolbarItemLocation::PrimaryLeft + } else { + ToolbarItemLocation::Hidden + } } fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { @@ -1013,11 +1013,14 @@ async fn heuristic_syntactic_expand( snapshot: BufferSnapshot, cx: &mut AsyncApp, ) -> Option> { + let start = snapshot.clip_point(input_range.start, Bias::Right); + let end = snapshot.clip_point(input_range.end, Bias::Left); let input_row_count = input_range.end.row - input_range.start.row; if input_row_count > max_row_count { return None; } + let input_range = start..end; // If the outline node contains the diagnostic and is small enough, just use that. let outline_range = snapshot.outline_range_containing(input_range.clone()); if let Some(outline_range) = outline_range.clone() { @@ -1046,54 +1049,47 @@ async fn heuristic_syntactic_expand( let node_range = node_start..node_end; let row_count = node_end.row - node_start.row + 1; let mut ancestor_range = None; - let reached_outline_node = cx.background_executor().scoped({ - let node_range = node_range.clone(); - let outline_range = outline_range.clone(); - let ancestor_range = &mut ancestor_range; - |scope| { - scope.spawn(async move { - // Stop if we've exceeded the row count or reached an outline node. Then, find the interval - // of node children which contains the query range. For example, this allows just returning - // the header of a declaration rather than the entire declaration. - if row_count > max_row_count || outline_range == Some(node_range.clone()) { - let mut cursor = node.walk(); - let mut included_child_start = None; - let mut included_child_end = None; - let mut previous_end = node_start; - if cursor.goto_first_child() { - loop { - let child_node = cursor.node(); - let child_range = - previous_end..Point::from_ts_point(child_node.end_position()); - if included_child_start.is_none() - && child_range.contains(&input_range.start) - { - included_child_start = Some(child_range.start); - } - if child_range.contains(&input_range.end) { - included_child_end = Some(child_range.end); - } - previous_end = child_range.end; - if !cursor.goto_next_sibling() { - break; - } + cx.background_executor() + .await_on_background(async { + // Stop if we've exceeded the row count or reached an outline node. Then, find the interval + // of node children which contains the query range. For example, this allows just returning + // the header of a declaration rather than the entire declaration. 
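The comment above is the core of `heuristic_syntactic_expand`: when the enclosing node is too large, only the run of its children that covers the diagnostic range is kept, e.g. just a declaration's header. A standalone model of that selection over plain row numbers (the patch walks a tree-sitter cursor and works in `Point`s; the fallback to the parent's end mirrors `included_child_end.unwrap_or(node_range.end)`):

```rust
use std::ops::RangeInclusive;

/// Pick the run of child rows that covers `query_rows`, attributing the gap
/// after each child to the next one, just as the patch does with
/// `previous_end..child_end_position`.
fn included_child_rows(
    parent_rows: RangeInclusive<u32>,
    child_end_rows: &[u32],
    query_rows: RangeInclusive<u32>,
) -> Option<RangeInclusive<u32>> {
    let mut included_start = None;
    let mut included_end = None;
    let mut previous_end = *parent_rows.start();
    for &child_end in child_end_rows {
        let child_range = previous_end..=child_end;
        if included_start.is_none() && child_range.contains(query_rows.start()) {
            included_start = Some(*child_range.start());
        }
        if child_range.contains(query_rows.end()) {
            included_end = Some(*child_range.end());
        }
        previous_end = child_end;
    }
    // If no child contains the query end, fall back to the parent's end.
    included_start.map(|start| start..=included_end.unwrap_or(*parent_rows.end()))
}

#[test]
fn diagnostic_in_first_child_selects_only_the_header() {
    // Parent spans rows 0..=39; its first child (the header) ends on row 1.
    assert_eq!(included_child_rows(0..=39, &[1, 39], 0..=0), Some(0..=1));
}
```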
+ if row_count > max_row_count || outline_range == Some(node_range.clone()) { + let mut cursor = node.walk(); + let mut included_child_start = None; + let mut included_child_end = None; + let mut previous_end = node_start; + if cursor.goto_first_child() { + loop { + let child_node = cursor.node(); + let child_range = + previous_end..Point::from_ts_point(child_node.end_position()); + if included_child_start.is_none() + && child_range.contains(&input_range.start) + { + included_child_start = Some(child_range.start); } - } - let end = included_child_end.unwrap_or(node_range.end); - if let Some(start) = included_child_start { - let row_count = end.row - start.row; - if row_count < max_row_count { - *ancestor_range = - Some(Some(RangeInclusive::new(start.row, end.row))); - return; + if child_range.contains(&input_range.end) { + included_child_end = Some(child_range.end); + } + previous_end = child_range.end; + if !cursor.goto_next_sibling() { + break; } } - *ancestor_range = Some(None); } - }) - } - }); - reached_outline_node.await; + let end = included_child_end.unwrap_or(node_range.end); + if let Some(start) = included_child_start { + let row_count = end.row - start.row; + if row_count < max_row_count { + ancestor_range = Some(Some(RangeInclusive::new(start.row, end.row))); + return; + } + } + ancestor_range = Some(None); + } + }) + .await; if let Some(node) = ancestor_range { return node; } diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 3d1d3840108c6842d57190bd56fd9db3984af7c6..d2504fde4a6bcb828db75f85f01aea2f296bd9dd 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -1,7 +1,7 @@ use super::*; use collections::{HashMap, HashSet}; use editor::{ - DisplayPoint, EditorSettings, Inlay, + DisplayPoint, EditorSettings, Inlay, MultiBufferOffset, actions::{GoToDiagnostic, GoToPreviousDiagnostic, Hover, MoveToBeginning}, display_map::DisplayRow, test::{ @@ -878,7 +878,8 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S diagnostics.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); if !snapshot.buffer_snapshot().is_empty() { - let position = rng.random_range(0..snapshot.buffer_snapshot().len()); + let position = rng + .random_range(MultiBufferOffset(0)..snapshot.buffer_snapshot().len()); let position = snapshot.buffer_snapshot().clip_offset(position, Bias::Left); log::info!( "adding inlay at {position}/{}: {:?}", diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 413bad5c0d696bfcba92a1127789c9e7c31edc30..b4ca52ea7239b6e4e76160a475d703ddd2933f44 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -1,6 +1,6 @@ use std::time::Duration; -use editor::Editor; +use editor::{Editor, MultiBufferOffset}; use gpui::{ Context, Entity, EventEmitter, IntoElement, ParentElement, Render, Styled, Subscription, Task, WeakEntity, Window, @@ -171,14 +171,19 @@ impl DiagnosticIndicator { let buffer = editor.buffer().read(cx).snapshot(cx); let cursor_position = editor .selections - .newest::(&editor.display_snapshot(cx)) + .newest::(&editor.display_snapshot(cx)) .head(); (buffer, cursor_position) }); let new_diagnostic = buffer - .diagnostics_in_range::(cursor_position..cursor_position) + .diagnostics_in_range::(cursor_position..cursor_position) .filter(|entry| !entry.range.is_empty()) - .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .min_by_key(|entry| { 
+ ( + entry.diagnostic.severity, + entry.range.end - entry.range.start, + ) + }) .map(|entry| entry.diagnostic); if new_diagnostic != self.current_diagnostic.as_ref() { let new_diagnostic = new_diagnostic.cloned(); diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index 2c6888d14be49c857e7805fb63f9f9335ac32c8e..2d5fb36a581f7bd17bb76f79791c276c86c9c631 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -11,7 +11,67 @@ workspace = true [lib] path = "src/edit_prediction.rs" +[features] +cli-support = [] + [dependencies] +ai_onboarding.workspace = true +anyhow.workspace = true +arrayvec.workspace = true +brotli.workspace = true client.workspace = true +cloud_llm_client.workspace = true +collections.workspace = true +copilot.workspace = true +db.workspace = true +edit_prediction_types.workspace = true +edit_prediction_context.workspace = true +feature_flags.workspace = true +fs.workspace = true +futures.workspace = true gpui.workspace = true +indoc.workspace = true +itertools.workspace = true language.workspace = true +language_model.workspace = true +log.workspace = true +lsp.workspace = true +menu.workspace = true +open_ai.workspace = true +postage.workspace = true +pretty_assertions.workspace = true +project.workspace = true +pulldown-cmark.workspace = true +rand.workspace = true +regex.workspace = true +release_channel.workspace = true +semver.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +strum.workspace = true +telemetry.workspace = true +telemetry_events.workspace = true +thiserror.workspace = true +ui.workspace = true +util.workspace = true +uuid.workspace = true +workspace.workspace = true +worktree.workspace = true +zed_actions.workspace = true +zeta_prompt.workspace = true + +[dev-dependencies] +clock = { workspace = true, features = ["test-support"] } +cloud_api_types.workspace = true +cloud_llm_client = { workspace = true, features = ["test-support"] } +ctor.workspace = true +gpui = { workspace = true, features = ["test-support"] } +indoc.workspace = true +language = { workspace = true, features = ["test-support"] } +language_model = { workspace = true, features = ["test-support"] } +lsp.workspace = true +parking_lot.workspace = true +project = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } +zlog.workspace = true diff --git a/crates/zeta/license_examples/0bsd.txt b/crates/edit_prediction/license_examples/0bsd.txt similarity index 100% rename from crates/zeta/license_examples/0bsd.txt rename to crates/edit_prediction/license_examples/0bsd.txt diff --git a/crates/zeta/license_examples/apache-2.0-ex0.txt b/crates/edit_prediction/license_examples/apache-2.0-ex0.txt similarity index 100% rename from crates/zeta/license_examples/apache-2.0-ex0.txt rename to crates/edit_prediction/license_examples/apache-2.0-ex0.txt diff --git a/crates/zeta/license_examples/apache-2.0-ex1.txt b/crates/edit_prediction/license_examples/apache-2.0-ex1.txt similarity index 100% rename from crates/zeta/license_examples/apache-2.0-ex1.txt rename to crates/edit_prediction/license_examples/apache-2.0-ex1.txt diff --git a/crates/zeta/license_examples/apache-2.0-ex2.txt b/crates/edit_prediction/license_examples/apache-2.0-ex2.txt similarity index 100% rename from crates/zeta/license_examples/apache-2.0-ex2.txt rename to crates/edit_prediction/license_examples/apache-2.0-ex2.txt diff --git 
a/crates/zeta/license_examples/apache-2.0-ex3.txt b/crates/edit_prediction/license_examples/apache-2.0-ex3.txt similarity index 100% rename from crates/zeta/license_examples/apache-2.0-ex3.txt rename to crates/edit_prediction/license_examples/apache-2.0-ex3.txt diff --git a/crates/zeta/license_examples/apache-2.0-ex4.txt b/crates/edit_prediction/license_examples/apache-2.0-ex4.txt similarity index 100% rename from crates/zeta/license_examples/apache-2.0-ex4.txt rename to crates/edit_prediction/license_examples/apache-2.0-ex4.txt diff --git a/crates/zeta/license_examples/bsd-1-clause.txt b/crates/edit_prediction/license_examples/bsd-1-clause.txt similarity index 100% rename from crates/zeta/license_examples/bsd-1-clause.txt rename to crates/edit_prediction/license_examples/bsd-1-clause.txt diff --git a/crates/zeta/license_examples/bsd-2-clause-ex0.txt b/crates/edit_prediction/license_examples/bsd-2-clause-ex0.txt similarity index 100% rename from crates/zeta/license_examples/bsd-2-clause-ex0.txt rename to crates/edit_prediction/license_examples/bsd-2-clause-ex0.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex0.txt b/crates/edit_prediction/license_examples/bsd-3-clause-ex0.txt similarity index 100% rename from crates/zeta/license_examples/bsd-3-clause-ex0.txt rename to crates/edit_prediction/license_examples/bsd-3-clause-ex0.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex1.txt b/crates/edit_prediction/license_examples/bsd-3-clause-ex1.txt similarity index 100% rename from crates/zeta/license_examples/bsd-3-clause-ex1.txt rename to crates/edit_prediction/license_examples/bsd-3-clause-ex1.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex2.txt b/crates/edit_prediction/license_examples/bsd-3-clause-ex2.txt similarity index 100% rename from crates/zeta/license_examples/bsd-3-clause-ex2.txt rename to crates/edit_prediction/license_examples/bsd-3-clause-ex2.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex3.txt b/crates/edit_prediction/license_examples/bsd-3-clause-ex3.txt similarity index 100% rename from crates/zeta/license_examples/bsd-3-clause-ex3.txt rename to crates/edit_prediction/license_examples/bsd-3-clause-ex3.txt diff --git a/crates/zeta/license_examples/bsd-3-clause-ex4.txt b/crates/edit_prediction/license_examples/bsd-3-clause-ex4.txt similarity index 100% rename from crates/zeta/license_examples/bsd-3-clause-ex4.txt rename to crates/edit_prediction/license_examples/bsd-3-clause-ex4.txt diff --git a/crates/zeta/license_examples/isc.txt b/crates/edit_prediction/license_examples/isc.txt similarity index 100% rename from crates/zeta/license_examples/isc.txt rename to crates/edit_prediction/license_examples/isc.txt diff --git a/crates/zeta/license_examples/mit-ex0.txt b/crates/edit_prediction/license_examples/mit-ex0.txt similarity index 100% rename from crates/zeta/license_examples/mit-ex0.txt rename to crates/edit_prediction/license_examples/mit-ex0.txt diff --git a/crates/zeta/license_examples/mit-ex1.txt b/crates/edit_prediction/license_examples/mit-ex1.txt similarity index 100% rename from crates/zeta/license_examples/mit-ex1.txt rename to crates/edit_prediction/license_examples/mit-ex1.txt diff --git a/crates/zeta/license_examples/mit-ex2.txt b/crates/edit_prediction/license_examples/mit-ex2.txt similarity index 100% rename from crates/zeta/license_examples/mit-ex2.txt rename to crates/edit_prediction/license_examples/mit-ex2.txt diff --git a/crates/zeta/license_examples/mit-ex3.txt 
b/crates/edit_prediction/license_examples/mit-ex3.txt similarity index 100% rename from crates/zeta/license_examples/mit-ex3.txt rename to crates/edit_prediction/license_examples/mit-ex3.txt diff --git a/crates/zeta/license_examples/upl-1.0.txt b/crates/edit_prediction/license_examples/upl-1.0.txt similarity index 100% rename from crates/zeta/license_examples/upl-1.0.txt rename to crates/edit_prediction/license_examples/upl-1.0.txt diff --git a/crates/zeta/license_examples/zlib-ex0.txt b/crates/edit_prediction/license_examples/zlib-ex0.txt similarity index 100% rename from crates/zeta/license_examples/zlib-ex0.txt rename to crates/edit_prediction/license_examples/zlib-ex0.txt diff --git a/crates/zeta/license_patterns/0bsd-pattern b/crates/edit_prediction/license_patterns/0bsd-pattern similarity index 100% rename from crates/zeta/license_patterns/0bsd-pattern rename to crates/edit_prediction/license_patterns/0bsd-pattern diff --git a/crates/zeta/license_patterns/apache-2.0-pattern b/crates/edit_prediction/license_patterns/apache-2.0-pattern similarity index 100% rename from crates/zeta/license_patterns/apache-2.0-pattern rename to crates/edit_prediction/license_patterns/apache-2.0-pattern diff --git a/crates/zeta/license_patterns/apache-2.0-reference-pattern b/crates/edit_prediction/license_patterns/apache-2.0-reference-pattern similarity index 100% rename from crates/zeta/license_patterns/apache-2.0-reference-pattern rename to crates/edit_prediction/license_patterns/apache-2.0-reference-pattern diff --git a/crates/zeta/license_patterns/bsd-pattern b/crates/edit_prediction/license_patterns/bsd-pattern similarity index 100% rename from crates/zeta/license_patterns/bsd-pattern rename to crates/edit_prediction/license_patterns/bsd-pattern diff --git a/crates/zeta/license_patterns/isc-pattern b/crates/edit_prediction/license_patterns/isc-pattern similarity index 100% rename from crates/zeta/license_patterns/isc-pattern rename to crates/edit_prediction/license_patterns/isc-pattern diff --git a/crates/zeta/license_patterns/mit-pattern b/crates/edit_prediction/license_patterns/mit-pattern similarity index 100% rename from crates/zeta/license_patterns/mit-pattern rename to crates/edit_prediction/license_patterns/mit-pattern diff --git a/crates/zeta/license_patterns/upl-1.0-pattern b/crates/edit_prediction/license_patterns/upl-1.0-pattern similarity index 100% rename from crates/zeta/license_patterns/upl-1.0-pattern rename to crates/edit_prediction/license_patterns/upl-1.0-pattern diff --git a/crates/zeta/license_patterns/zlib-pattern b/crates/edit_prediction/license_patterns/zlib-pattern similarity index 100% rename from crates/zeta/license_patterns/zlib-pattern rename to crates/edit_prediction/license_patterns/zlib-pattern diff --git a/crates/edit_prediction/src/cursor_excerpt.rs b/crates/edit_prediction/src/cursor_excerpt.rs new file mode 100644 index 0000000000000000000000000000000000000000..1f2f1d32ebcb2eaa151433bd49d275e0e2a3b817 --- /dev/null +++ b/crates/edit_prediction/src/cursor_excerpt.rs @@ -0,0 +1,78 @@ +use language::{BufferSnapshot, Point}; +use std::ops::Range; + +pub fn editable_and_context_ranges_for_cursor_position( + position: Point, + snapshot: &BufferSnapshot, + editable_region_token_limit: usize, + context_token_limit: usize, +) -> (Range, Range) { + let mut scope_range = position..position; + let mut remaining_edit_tokens = editable_region_token_limit; + + while let Some(parent) = snapshot.syntax_ancestor(scope_range.clone()) { + let parent_tokens = 
guess_token_count(parent.byte_range().len()); + let parent_point_range = Point::new( + parent.start_position().row as u32, + parent.start_position().column as u32, + ) + ..Point::new( + parent.end_position().row as u32, + parent.end_position().column as u32, + ); + if parent_point_range == scope_range { + break; + } else if parent_tokens <= editable_region_token_limit { + scope_range = parent_point_range; + remaining_edit_tokens = editable_region_token_limit - parent_tokens; + } else { + break; + } + } + + let editable_range = expand_range(snapshot, scope_range, remaining_edit_tokens); + let context_range = expand_range(snapshot, editable_range.clone(), context_token_limit); + (editable_range, context_range) +} + +fn expand_range( + snapshot: &BufferSnapshot, + range: Range, + mut remaining_tokens: usize, +) -> Range { + let mut expanded_range = range; + expanded_range.start.column = 0; + expanded_range.end.column = snapshot.line_len(expanded_range.end.row); + loop { + let mut expanded = false; + + if remaining_tokens > 0 && expanded_range.start.row > 0 { + expanded_range.start.row -= 1; + let line_tokens = + guess_token_count(snapshot.line_len(expanded_range.start.row) as usize); + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + + if remaining_tokens > 0 && expanded_range.end.row < snapshot.max_point().row { + expanded_range.end.row += 1; + expanded_range.end.column = snapshot.line_len(expanded_range.end.row); + let line_tokens = guess_token_count(expanded_range.end.column as usize); + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + + if !expanded { + break; + } + } + expanded_range +} + +/// Typical number of string bytes per token for the purposes of limiting model input. This is +/// intentionally low to err on the side of underestimating limits. 
+pub(crate) const BYTES_PER_TOKEN_GUESS: usize = 3; + +pub fn guess_token_count(bytes: usize) -> usize { + bytes / BYTES_PER_TOKEN_GUESS +} diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index c9bb0672a0c9cb7c56c3c703b0e10594d56cc0c1..f5ea7590fcba97ee916af985824e21cdf4ea725f 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1,292 +1,2132 @@ -use std::{ops::Range, sync::Arc}; +use anyhow::Result; +use arrayvec::ArrayVec; +use client::{Client, EditPredictionUsage, UserStore}; +use cloud_llm_client::predict_edits_v3::{self, PromptFormat}; +use cloud_llm_client::{ + AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, EditPredictionRejectReason, + EditPredictionRejection, MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST, + MINIMUM_REQUIRED_VERSION_HEADER_NAME, PredictEditsRequestTrigger, RejectEditPredictionsBodyRef, + ZED_VERSION_HEADER_NAME, +}; +use collections::{HashMap, HashSet}; +use db::kvp::{Dismissable, KEY_VALUE_STORE}; +use edit_prediction_context::EditPredictionExcerptOptions; +use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile}; +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use futures::{ + AsyncReadExt as _, FutureExt as _, StreamExt as _, + channel::mpsc::{self, UnboundedReceiver}, + select_biased, +}; +use gpui::BackgroundExecutor; +use gpui::http_client::Url; +use gpui::{ + App, AsyncApp, Entity, EntityId, Global, SharedString, Subscription, Task, WeakEntity, actions, + http_client::{self, AsyncBody, Method}, + prelude::*, +}; +use language::language_settings::all_language_settings; +use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToPoint}; +use language::{BufferSnapshot, OffsetRangeExt}; +use language_model::{LlmApiToken, RefreshLlmTokenListener}; +use project::{Project, ProjectPath, WorktreeId}; +use release_channel::AppVersion; +use semver::Version; +use serde::de::DeserializeOwned; +use settings::{EditPredictionProvider, SettingsStore, update_settings_file}; +use std::collections::{VecDeque, hash_map}; +use workspace::Workspace; -use client::EditPredictionUsage; -use gpui::{App, Context, Entity, SharedString}; -use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt}; +use std::ops::Range; +use std::path::Path; +use std::rc::Rc; +use std::str::FromStr as _; +use std::sync::{Arc, LazyLock}; +use std::time::{Duration, Instant}; +use std::{env, mem}; +use thiserror::Error; +use util::{RangeExt as _, ResultExt as _}; +use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; -// TODO: Find a better home for `Direction`. -// -// This should live in an ancestor crate of `editor` and `edit_prediction`, -// but at time of writing there isn't an obvious spot. 
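A simplified, standalone model of the sizing math in the new `cursor_excerpt` module above: byte lengths are converted to tokens with a deliberately low bytes-per-token guess, so the estimate overshoots real token usage and the excerpt stays under the limit, and `expand_range` then spends that budget on whole lines, saturating on the last one rather than going negative. The `lines_within_budget` helper below is illustrative only, not part of the patch:

```rust
/// Same constant and heuristic as `cursor_excerpt.rs`.
const BYTES_PER_TOKEN_GUESS: usize = 3;

fn guess_token_count(bytes: usize) -> usize {
    bytes / BYTES_PER_TOKEN_GUESS
}

/// How many lines (given as byte lengths, in the order they would be added)
/// fit into `remaining_tokens`, using the same "take a line while any budget
/// is left, then saturate" rule as `expand_range`.
fn lines_within_budget(line_byte_lens: &[usize], mut remaining_tokens: usize) -> usize {
    let mut included = 0;
    for &len in line_byte_lens {
        if remaining_tokens == 0 {
            break;
        }
        remaining_tokens = remaining_tokens.saturating_sub(guess_token_count(len));
        included += 1;
    }
    included
}

#[test]
fn budget_is_spent_line_by_line() {
    // Three 60-byte lines are budgeted at 20 tokens each.
    assert_eq!(lines_within_budget(&[60, 60, 60], 40), 2);
    // With 50 tokens the third line still fits on the remaining sliver,
    // since the subtraction saturates instead of rejecting the line.
    assert_eq!(lines_within_budget(&[60, 60, 60], 50), 3);
}
```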
-#[derive(Copy, Clone, PartialEq, Eq)] -pub enum Direction { - Prev, - Next, +pub mod cursor_excerpt; +pub mod example_spec; +mod license_detection; +pub mod mercury; +mod onboarding_modal; +pub mod open_ai_response; +mod prediction; +pub mod sweep_ai; + +#[cfg(any(test, feature = "test-support", feature = "cli-support"))] +pub mod udiff; + +mod zed_edit_prediction_delegate; +pub mod zeta1; +pub mod zeta2; + +#[cfg(test)] +mod edit_prediction_tests; + +use crate::license_detection::LicenseDetectionWatcher; +use crate::mercury::Mercury; +use crate::onboarding_modal::ZedPredictModal; +pub use crate::prediction::EditPrediction; +pub use crate::prediction::EditPredictionId; +use crate::prediction::EditPredictionResult; +pub use crate::sweep_ai::SweepAi; +pub use language_model::ApiKeyState; +pub use telemetry_events::EditPredictionRating; +pub use zed_edit_prediction_delegate::ZedEditPredictionDelegate; + +actions!( + edit_prediction, + [ + /// Resets the edit prediction onboarding state. + ResetOnboarding, + /// Clears the edit prediction history. + ClearHistory, + ] +); + +/// Maximum number of events to track. +const EVENT_COUNT_MAX: usize = 6; +const CHANGE_GROUPING_LINE_SPAN: u32 = 8; +const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1); +const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice"; +const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15); + +pub struct SweepFeatureFlag; + +impl FeatureFlag for SweepFeatureFlag { + const NAME: &str = "sweep-ai"; } -#[derive(Clone)] -pub enum EditPrediction { - /// Edits within the buffer that requested the prediction - Local { - id: Option, - edits: Vec<(Range, Arc)>, - edit_preview: Option, - }, - /// Jump to a different file from the one that requested the prediction - Jump { - id: Option, - snapshot: language::BufferSnapshot, - target: language::Anchor, +pub struct MercuryFeatureFlag; + +impl FeatureFlag for MercuryFeatureFlag { + const NAME: &str = "mercury"; +} + +pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions { + context: EditPredictionExcerptOptions { + max_bytes: 512, + min_bytes: 128, + target_before_cursor_over_total_bytes: 0.5, }, + prompt_format: PromptFormat::DEFAULT, +}; + +static USE_OLLAMA: LazyLock = + LazyLock::new(|| env::var("ZED_ZETA2_OLLAMA").is_ok_and(|var| !var.is_empty())); + +static EDIT_PREDICTIONS_MODEL_ID: LazyLock = LazyLock::new(|| { + match env::var("ZED_ZETA2_MODEL").as_deref() { + Ok("zeta2-exp") => "4w5n28vw", // Fine-tuned model @ Baseten + Ok(model) => model, + Err(_) if *USE_OLLAMA => "qwen3-coder:30b", + Err(_) => "yqvev8r3", // Vanilla qwen3-coder @ Baseten + } + .to_string() +}); + +pub struct Zeta2FeatureFlag; + +impl FeatureFlag for Zeta2FeatureFlag { + const NAME: &'static str = "zeta2"; + + fn enabled_for_staff() -> bool { + true + } } -pub enum DataCollectionState { - /// The provider doesn't support data collection. - Unsupported, - /// Data collection is enabled. - Enabled { is_project_open_source: bool }, - /// Data collection is disabled or unanswered. 
- Disabled { is_project_open_source: bool }, +#[derive(Clone)] +struct EditPredictionStoreGlobal(Entity); + +impl Global for EditPredictionStoreGlobal {} + +pub struct EditPredictionStore { + client: Arc, + user_store: Entity, + llm_token: LlmApiToken, + _llm_token_subscription: Subscription, + projects: HashMap, + use_context: bool, + options: ZetaOptions, + update_required: bool, + #[cfg(feature = "cli-support")] + eval_cache: Option>, + edit_prediction_model: EditPredictionModel, + pub sweep_ai: SweepAi, + pub mercury: Mercury, + data_collection_choice: DataCollectionChoice, + reject_predictions_tx: mpsc::UnboundedSender, + shown_predictions: VecDeque, + rated_predictions: HashSet, + custom_predict_edits_url: Option>, } -impl DataCollectionState { - pub fn is_supported(&self) -> bool { - !matches!(self, DataCollectionState::Unsupported) +#[derive(Copy, Clone, Default, PartialEq, Eq)] +pub enum EditPredictionModel { + #[default] + Zeta1, + Zeta2, + Sweep, + Mercury, +} + +pub struct EditPredictionModelInput { + project: Entity, + buffer: Entity, + snapshot: BufferSnapshot, + position: Anchor, + events: Vec>, + related_files: Arc<[RelatedFile]>, + recent_paths: VecDeque, + trigger: PredictEditsRequestTrigger, + diagnostic_search_range: Range, + debug_tx: Option>, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ZetaOptions { + pub context: EditPredictionExcerptOptions, + pub prompt_format: predict_edits_v3::PromptFormat, +} + +#[derive(Debug)] +pub enum DebugEvent { + ContextRetrievalStarted(ContextRetrievalStartedDebugEvent), + ContextRetrievalFinished(ContextRetrievalFinishedDebugEvent), + EditPredictionStarted(EditPredictionStartedDebugEvent), + EditPredictionFinished(EditPredictionFinishedDebugEvent), +} + +#[derive(Debug)] +pub struct ContextRetrievalStartedDebugEvent { + pub project_entity_id: EntityId, + pub timestamp: Instant, + pub search_prompt: String, +} + +#[derive(Debug)] +pub struct ContextRetrievalFinishedDebugEvent { + pub project_entity_id: EntityId, + pub timestamp: Instant, + pub metadata: Vec<(&'static str, SharedString)>, +} + +#[derive(Debug)] +pub struct EditPredictionStartedDebugEvent { + pub buffer: WeakEntity, + pub position: Anchor, + pub prompt: Option, +} + +#[derive(Debug)] +pub struct EditPredictionFinishedDebugEvent { + pub buffer: WeakEntity, + pub position: Anchor, + pub model_output: Option, +} + +pub type RequestDebugInfo = predict_edits_v3::DebugInfo; + +struct ProjectState { + events: VecDeque>, + last_event: Option, + recent_paths: VecDeque, + registered_buffers: HashMap, + current_prediction: Option, + next_pending_prediction_id: usize, + pending_predictions: ArrayVec, + debug_tx: Option>, + last_prediction_refresh: Option<(EntityId, Instant)>, + cancelled_predictions: HashSet, + context: Entity, + license_detection_watchers: HashMap>, + _subscription: gpui::Subscription, +} + +impl ProjectState { + pub fn events(&self, cx: &App) -> Vec> { + self.events + .iter() + .cloned() + .chain( + self.last_event + .as_ref() + .and_then(|event| event.finalize(&self.license_detection_watchers, cx)), + ) + .collect() + } + + pub fn events_split_by_pause(&self, cx: &App) -> Vec> { + self.events + .iter() + .cloned() + .chain(self.last_event.as_ref().iter().flat_map(|event| { + let (one, two) = event.split_by_pause(); + let one = one.finalize(&self.license_detection_watchers, cx); + let two = two.and_then(|two| two.finalize(&self.license_detection_watchers, cx)); + one.into_iter().chain(two) + })) + .collect() + } + + fn cancel_pending_prediction( + &mut 
self, + pending_prediction: PendingPrediction, + cx: &mut Context, + ) { + self.cancelled_predictions.insert(pending_prediction.id); + + cx.spawn(async move |this, cx| { + let Some(prediction_id) = pending_prediction.task.await else { + return; + }; + + this.update(cx, |this, _cx| { + this.reject_prediction(prediction_id, EditPredictionRejectReason::Canceled, false); + }) + .ok(); + }) + .detach() } - pub fn is_enabled(&self) -> bool { - matches!(self, DataCollectionState::Enabled { .. }) + fn active_buffer( + &self, + project: &Entity, + cx: &App, + ) -> Option<(Entity, Option)> { + let project = project.read(cx); + let active_path = project.path_for_entry(project.active_entry()?, cx)?; + let active_buffer = project.buffer_store().read(cx).get_by_path(&active_path)?; + let registered_buffer = self.registered_buffers.get(&active_buffer.entity_id())?; + Some((active_buffer, registered_buffer.last_position)) } +} + +#[derive(Debug, Clone)] +struct CurrentEditPrediction { + pub requested_by: PredictionRequestedBy, + pub prediction: EditPrediction, + pub was_shown: bool, +} + +impl CurrentEditPrediction { + fn should_replace_prediction(&self, old_prediction: &Self, cx: &App) -> bool { + let Some(new_edits) = self + .prediction + .interpolate(&self.prediction.buffer.read(cx)) + else { + return false; + }; + + if self.prediction.buffer != old_prediction.prediction.buffer { + return true; + } + + let Some(old_edits) = old_prediction + .prediction + .interpolate(&old_prediction.prediction.buffer.read(cx)) + else { + return true; + }; - pub fn is_project_open_source(&self) -> bool { + let requested_by_buffer_id = self.requested_by.buffer_id(); + + // This reduces the occurrence of UI thrash from replacing edits + // + // TODO: This is fairly arbitrary - should have a more general heuristic that handles multiple edits. + if requested_by_buffer_id == Some(self.prediction.buffer.entity_id()) + && requested_by_buffer_id == Some(old_prediction.prediction.buffer.entity_id()) + && old_edits.len() == 1 + && new_edits.len() == 1 + { + let (old_range, old_text) = &old_edits[0]; + let (new_range, new_text) = &new_edits[0]; + new_range == old_range && new_text.starts_with(old_text.as_ref()) + } else { + true + } + } +} + +#[derive(Debug, Clone)] +enum PredictionRequestedBy { + DiagnosticsUpdate, + Buffer(EntityId), +} + +impl PredictionRequestedBy { + pub fn buffer_id(&self) -> Option { match self { - Self::Enabled { - is_project_open_source, - } - | Self::Disabled { - is_project_open_source, - } => *is_project_open_source, - _ => false, + PredictionRequestedBy::DiagnosticsUpdate => None, + PredictionRequestedBy::Buffer(buffer_id) => Some(*buffer_id), + } + } +} + +#[derive(Debug)] +struct PendingPrediction { + id: usize, + task: Task>, +} + +/// A prediction from the perspective of a buffer. 
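+/// `Local` means the prediction edits this buffer directly; `Jump` means the
+/// prediction targets another buffer and the UI should offer to jump there.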
+#[derive(Debug)] +enum BufferEditPrediction<'a> { + Local { prediction: &'a EditPrediction }, + Jump { prediction: &'a EditPrediction }, +} + +#[cfg(test)] +impl std::ops::Deref for BufferEditPrediction<'_> { + type Target = EditPrediction; + + fn deref(&self) -> &Self::Target { + match self { + BufferEditPrediction::Local { prediction } => prediction, + BufferEditPrediction::Jump { prediction } => prediction, } } } -pub trait EditPredictionProvider: 'static + Sized { - fn name() -> &'static str; - fn display_name() -> &'static str; - fn show_completions_in_menu() -> bool; - fn show_tab_accept_marker() -> bool { - false +struct RegisteredBuffer { + file: Option>, + snapshot: TextBufferSnapshot, + last_position: Option, + _subscriptions: [gpui::Subscription; 2], +} + +#[derive(Clone)] +struct LastEvent { + old_snapshot: TextBufferSnapshot, + new_snapshot: TextBufferSnapshot, + old_file: Option>, + new_file: Option>, + end_edit_anchor: Option, + snapshot_after_last_editing_pause: Option, + last_edit_time: Option, +} + +impl LastEvent { + pub fn finalize( + &self, + license_detection_watchers: &HashMap>, + cx: &App, + ) -> Option> { + let path = buffer_path_with_id_fallback(self.new_file.as_ref(), &self.new_snapshot, cx); + let old_path = buffer_path_with_id_fallback(self.old_file.as_ref(), &self.old_snapshot, cx); + + let in_open_source_repo = + [self.new_file.as_ref(), self.old_file.as_ref()] + .iter() + .all(|file| { + file.is_some_and(|file| { + license_detection_watchers + .get(&file.worktree_id(cx)) + .is_some_and(|watcher| watcher.is_project_open_source()) + }) + }); + + let diff = language::unified_diff(&self.old_snapshot.text(), &self.new_snapshot.text()); + + if path == old_path && diff.is_empty() { + None + } else { + Some(Arc::new(zeta_prompt::Event::BufferChange { + old_path, + path, + diff, + in_open_source_repo, + // TODO: Actually detect if this edit was predicted or not + predicted: false, + })) + } } - fn supports_jump_to_edit() -> bool { - true + + pub fn split_by_pause(&self) -> (LastEvent, Option) { + let Some(boundary_snapshot) = self.snapshot_after_last_editing_pause.as_ref() else { + return (self.clone(), None); + }; + + let before = LastEvent { + old_snapshot: self.old_snapshot.clone(), + new_snapshot: boundary_snapshot.clone(), + old_file: self.old_file.clone(), + new_file: self.new_file.clone(), + end_edit_anchor: self.end_edit_anchor, + snapshot_after_last_editing_pause: None, + last_edit_time: self.last_edit_time, + }; + + let after = LastEvent { + old_snapshot: boundary_snapshot.clone(), + new_snapshot: self.new_snapshot.clone(), + old_file: self.old_file.clone(), + new_file: self.new_file.clone(), + end_edit_anchor: self.end_edit_anchor, + snapshot_after_last_editing_pause: None, + last_edit_time: self.last_edit_time, + }; + + (before, Some(after)) + } +} + +fn buffer_path_with_id_fallback( + file: Option<&Arc>, + snapshot: &TextBufferSnapshot, + cx: &App, +) -> Arc { + if let Some(file) = file { + file.full_path(cx).into() + } else { + Path::new(&format!("untitled-{}", snapshot.remote_id())).into() + } +} + +impl EditPredictionStore { + pub fn try_global(cx: &App) -> Option> { + cx.try_global::() + .map(|global| global.0.clone()) + } + + pub fn global( + client: &Arc, + user_store: &Entity, + cx: &mut App, + ) -> Entity { + cx.try_global::() + .map(|global| global.0.clone()) + .unwrap_or_else(|| { + let ep_store = cx.new(|cx| Self::new(client.clone(), user_store.clone(), cx)); + cx.set_global(EditPredictionStoreGlobal(ep_store.clone())); + ep_store + }) + } + 
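+    /// Constructs a store directly. Most callers go through [`Self::global`],
+    /// which lazily creates one and registers it as a GPUI global. A minimal
+    /// usage sketch (hypothetical call site; `client`, `user_store`, and
+    /// `project` are assumed to be in scope):
+    ///
+    /// ```ignore
+    /// let store = EditPredictionStore::global(&client, &user_store, cx);
+    /// store.update(cx, |store, cx| store.register_project(&project, cx));
+    /// ```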
+ pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { + let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); + let data_collection_choice = Self::load_data_collection_choice(); + + let llm_token = LlmApiToken::default(); + + let (reject_tx, reject_rx) = mpsc::unbounded(); + cx.background_spawn({ + let client = client.clone(); + let llm_token = llm_token.clone(); + let app_version = AppVersion::global(cx); + let background_executor = cx.background_executor().clone(); + async move { + Self::handle_rejected_predictions( + reject_rx, + client, + llm_token, + app_version, + background_executor, + ) + .await + } + }) + .detach(); + + let mut this = Self { + projects: HashMap::default(), + client, + user_store, + options: DEFAULT_OPTIONS, + use_context: false, + llm_token, + _llm_token_subscription: cx.subscribe( + &refresh_llm_token_listener, + |this, _listener, _event, cx| { + let client = this.client.clone(); + let llm_token = this.llm_token.clone(); + cx.spawn(async move |_this, _cx| { + llm_token.refresh(&client).await?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + }, + ), + update_required: false, + #[cfg(feature = "cli-support")] + eval_cache: None, + edit_prediction_model: EditPredictionModel::Zeta2, + sweep_ai: SweepAi::new(cx), + mercury: Mercury::new(cx), + data_collection_choice, + reject_predictions_tx: reject_tx, + rated_predictions: Default::default(), + shown_predictions: Default::default(), + custom_predict_edits_url: match env::var("ZED_PREDICT_EDITS_URL") { + Ok(custom_url) => Url::parse(&custom_url).log_err().map(Into::into), + Err(_) => { + if *USE_OLLAMA { + Some( + Url::parse("http://localhost:11434/v1/chat/completions") + .unwrap() + .into(), + ) + } else { + None + } + } + }, + }; + + this.configure_context_retrieval(cx); + let weak_this = cx.weak_entity(); + cx.on_flags_ready(move |_, cx| { + weak_this + .update(cx, |this, cx| this.configure_context_retrieval(cx)) + .ok(); + }) + .detach(); + cx.observe_global::(|this, cx| { + this.configure_context_retrieval(cx); + }) + .detach(); + + this + } + + #[cfg(test)] + pub fn set_custom_predict_edits_url(&mut self, url: Url) { + self.custom_predict_edits_url = Some(url.into()); + } + + pub fn set_edit_prediction_model(&mut self, model: EditPredictionModel) { + self.edit_prediction_model = model; } - fn data_collection_state(&self, _cx: &App) -> DataCollectionState { - DataCollectionState::Unsupported + pub fn has_sweep_api_token(&self, cx: &App) -> bool { + self.sweep_ai.api_token.read(cx).has_key() } - fn usage(&self, _cx: &App) -> Option { - None + pub fn has_mercury_api_token(&self, cx: &App) -> bool { + self.mercury.api_token.read(cx).has_key() } - fn toggle_data_collection(&mut self, _cx: &mut App) {} - fn is_enabled( + #[cfg(feature = "cli-support")] + pub fn with_eval_cache(&mut self, cache: Arc) { + self.eval_cache = Some(cache); + } + + pub fn options(&self) -> &ZetaOptions { + &self.options + } + + pub fn set_options(&mut self, options: ZetaOptions) { + self.options = options; + } + + pub fn set_use_context(&mut self, use_context: bool) { + self.use_context = use_context; + } + + pub fn clear_history(&mut self) { + for project_state in self.projects.values_mut() { + project_state.events.clear(); + } + } + + pub fn clear_history_for_project(&mut self, project: &Entity) { + if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { + project_state.events.clear(); + } + } + + pub fn edit_history_for_project( &self, - buffer: &Entity, - cursor_position: 
language::Anchor, + project: &Entity, + cx: &App, + ) -> Vec> { + self.projects + .get(&project.entity_id()) + .map(|project_state| project_state.events(cx)) + .unwrap_or_default() + } + + pub fn edit_history_for_project_with_pause_split_last_event( + &self, + project: &Entity, cx: &App, - ) -> bool; - fn is_refreshing(&self) -> bool; - fn refresh( + ) -> Vec> { + self.projects + .get(&project.entity_id()) + .map(|project_state| project_state.events_split_by_pause(cx)) + .unwrap_or_default() + } + + pub fn context_for_project<'a>( + &'a self, + project: &Entity, + cx: &'a App, + ) -> Arc<[RelatedFile]> { + self.projects + .get(&project.entity_id()) + .map(|project| project.context.read(cx).related_files()) + .unwrap_or_else(|| vec![].into()) + } + + pub fn context_for_project_with_buffers<'a>( + &'a self, + project: &Entity, + cx: &'a App, + ) -> Option)>> { + self.projects + .get(&project.entity_id()) + .map(|project| project.context.read(cx).related_files_with_buffers()) + } + + pub fn usage(&self, cx: &App) -> Option { + if self.edit_prediction_model == EditPredictionModel::Zeta2 { + self.user_store.read(cx).edit_prediction_usage() + } else { + None + } + } + + pub fn register_project(&mut self, project: &Entity, cx: &mut Context) { + self.get_or_init_project(project, cx); + } + + pub fn register_buffer( &mut self, - buffer: Entity, - cursor_position: language::Anchor, - debounce: bool, + buffer: &Entity, + project: &Entity, cx: &mut Context, - ); - fn cycle( + ) { + let project_state = self.get_or_init_project(project, cx); + Self::register_buffer_impl(project_state, buffer, project, cx); + } + + fn get_or_init_project( &mut self, - buffer: Entity, - cursor_position: language::Anchor, - direction: Direction, + project: &Entity, + cx: &mut Context, + ) -> &mut ProjectState { + let entity_id = project.entity_id(); + self.projects + .entry(entity_id) + .or_insert_with(|| ProjectState { + context: { + let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(project, cx)); + cx.subscribe(&related_excerpt_store, move |this, _, event, _| { + this.handle_excerpt_store_event(entity_id, event); + }) + .detach(); + related_excerpt_store + }, + events: VecDeque::new(), + last_event: None, + recent_paths: VecDeque::new(), + debug_tx: None, + registered_buffers: HashMap::default(), + current_prediction: None, + cancelled_predictions: HashSet::default(), + pending_predictions: ArrayVec::new(), + next_pending_prediction_id: 0, + last_prediction_refresh: None, + license_detection_watchers: HashMap::default(), + _subscription: cx.subscribe(&project, Self::handle_project_event), + }) + } + + pub fn remove_project(&mut self, project: &Entity) { + self.projects.remove(&project.entity_id()); + } + + fn handle_excerpt_store_event( + &mut self, + project_entity_id: EntityId, + event: &RelatedExcerptStoreEvent, + ) { + if let Some(project_state) = self.projects.get(&project_entity_id) { + if let Some(debug_tx) = project_state.debug_tx.clone() { + match event { + RelatedExcerptStoreEvent::StartedRefresh => { + debug_tx + .unbounded_send(DebugEvent::ContextRetrievalStarted( + ContextRetrievalStartedDebugEvent { + project_entity_id: project_entity_id, + timestamp: Instant::now(), + search_prompt: String::new(), + }, + )) + .ok(); + } + RelatedExcerptStoreEvent::FinishedRefresh { + cache_hit_count, + cache_miss_count, + mean_definition_latency, + max_definition_latency, + } => { + debug_tx + .unbounded_send(DebugEvent::ContextRetrievalFinished( + ContextRetrievalFinishedDebugEvent { + project_entity_id: 
project_entity_id, + timestamp: Instant::now(), + metadata: vec![ + ( + "Cache Hits", + format!( + "{}/{}", + cache_hit_count, + cache_hit_count + cache_miss_count + ) + .into(), + ), + ( + "Max LSP Time", + format!("{} ms", max_definition_latency.as_millis()) + .into(), + ), + ( + "Mean LSP Time", + format!("{} ms", mean_definition_latency.as_millis()) + .into(), + ), + ], + }, + )) + .ok(); + } + } + } + } + } + + pub fn debug_info( + &mut self, + project: &Entity, + cx: &mut Context, + ) -> mpsc::UnboundedReceiver { + let project_state = self.get_or_init_project(project, cx); + let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded(); + project_state.debug_tx = Some(debug_watch_tx); + debug_watch_rx + } + + fn handle_project_event( + &mut self, + project: Entity, + event: &project::Event, + cx: &mut Context, + ) { + // TODO [zeta2] init with recent paths + match event { + project::Event::ActiveEntryChanged(Some(active_entry_id)) => { + let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { + return; + }; + let path = project.read(cx).path_for_entry(*active_entry_id, cx); + if let Some(path) = path { + if let Some(ix) = project_state + .recent_paths + .iter() + .position(|probe| probe == &path) + { + project_state.recent_paths.remove(ix); + } + project_state.recent_paths.push_front(path); + } + } + project::Event::DiagnosticsUpdated { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics(project, cx); + } + } + _ => (), + } + } + + fn register_buffer_impl<'a>( + project_state: &'a mut ProjectState, + buffer: &Entity, + project: &Entity, cx: &mut Context, - ); - fn accept(&mut self, cx: &mut Context); - fn discard(&mut self, cx: &mut Context); - fn suggest( + ) -> &'a mut RegisteredBuffer { + let buffer_id = buffer.entity_id(); + + if let Some(file) = buffer.read(cx).file() { + let worktree_id = file.worktree_id(cx); + if let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) { + project_state + .license_detection_watchers + .entry(worktree_id) + .or_insert_with(|| { + let project_entity_id = project.entity_id(); + cx.observe_release(&worktree, move |this, _worktree, _cx| { + let Some(project_state) = this.projects.get_mut(&project_entity_id) + else { + return; + }; + project_state + .license_detection_watchers + .remove(&worktree_id); + }) + .detach(); + Rc::new(LicenseDetectionWatcher::new(&worktree, cx)) + }); + } + } + + match project_state.registered_buffers.entry(buffer_id) { + hash_map::Entry::Occupied(entry) => entry.into_mut(), + hash_map::Entry::Vacant(entry) => { + let buf = buffer.read(cx); + let snapshot = buf.text_snapshot(); + let file = buf.file().cloned(); + let project_entity_id = project.entity_id(); + entry.insert(RegisteredBuffer { + snapshot, + file, + last_position: None, + _subscriptions: [ + cx.subscribe(buffer, { + let project = project.downgrade(); + move |this, buffer, event, cx| { + if let language::BufferEvent::Edited = event + && let Some(project) = project.upgrade() + { + this.report_changes_for_buffer(&buffer, &project, cx); + } + } + }), + cx.observe_release(buffer, move |this, _buffer, _cx| { + let Some(project_state) = this.projects.get_mut(&project_entity_id) + else { + return; + }; + project_state.registered_buffers.remove(&buffer_id); + }), + ], + }) + } + } + } + + fn report_changes_for_buffer( &mut self, buffer: &Entity, - cursor_position: language::Anchor, + project: &Entity, cx: &mut Context, - ) -> Option; -} + ) { + let project_state = self.get_or_init_project(project, cx); + let 
registered_buffer = Self::register_buffer_impl(project_state, buffer, project, cx); -pub trait EditPredictionProviderHandle { - fn name(&self) -> &'static str; - fn display_name(&self) -> &'static str; - fn is_enabled( - &self, + let buf = buffer.read(cx); + let new_file = buf.file().cloned(); + let new_snapshot = buf.text_snapshot(); + if new_snapshot.version == registered_buffer.snapshot.version { + return; + } + + let old_file = mem::replace(&mut registered_buffer.file, new_file.clone()); + let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone()); + let end_edit_anchor = new_snapshot + .anchored_edits_since::(&old_snapshot.version) + .last() + .map(|(_, range)| range.end); + let events = &mut project_state.events; + + let now = cx.background_executor().now(); + if let Some(last_event) = project_state.last_event.as_mut() { + let is_next_snapshot_of_same_buffer = old_snapshot.remote_id() + == last_event.new_snapshot.remote_id() + && old_snapshot.version == last_event.new_snapshot.version; + + let should_coalesce = is_next_snapshot_of_same_buffer + && end_edit_anchor + .as_ref() + .zip(last_event.end_edit_anchor.as_ref()) + .is_some_and(|(a, b)| { + let a = a.to_point(&new_snapshot); + let b = b.to_point(&new_snapshot); + a.row.abs_diff(b.row) <= CHANGE_GROUPING_LINE_SPAN + }); + + if should_coalesce { + let pause_elapsed = last_event + .last_edit_time + .map(|t| now.duration_since(t) >= LAST_CHANGE_GROUPING_TIME) + .unwrap_or(false); + if pause_elapsed { + last_event.snapshot_after_last_editing_pause = + Some(last_event.new_snapshot.clone()); + } + + last_event.end_edit_anchor = end_edit_anchor; + last_event.new_snapshot = new_snapshot; + last_event.last_edit_time = Some(now); + return; + } + } + + if events.len() + 1 >= EVENT_COUNT_MAX { + events.pop_front(); + } + + if let Some(event) = project_state.last_event.take() { + events.extend(event.finalize(&project_state.license_detection_watchers, cx)); + } + + project_state.last_event = Some(LastEvent { + old_file, + new_file, + old_snapshot, + new_snapshot, + end_edit_anchor, + snapshot_after_last_editing_pause: None, + last_edit_time: Some(now), + }); + } + + fn prediction_at( + &mut self, buffer: &Entity, - cursor_position: language::Anchor, + position: Option, + project: &Entity, cx: &App, - ) -> bool; - fn show_completions_in_menu(&self) -> bool; - fn show_tab_accept_marker(&self) -> bool; - fn supports_jump_to_edit(&self) -> bool; - fn data_collection_state(&self, cx: &App) -> DataCollectionState; - fn usage(&self, cx: &App) -> Option; - fn toggle_data_collection(&self, cx: &mut App); - fn is_refreshing(&self, cx: &App) -> bool; - fn refresh( - &self, - buffer: Entity, - cursor_position: language::Anchor, - debounce: bool, - cx: &mut App, - ); - fn cycle( - &self, - buffer: Entity, - cursor_position: language::Anchor, - direction: Direction, - cx: &mut App, - ); - fn accept(&self, cx: &mut App); - fn discard(&self, cx: &mut App); - fn suggest( - &self, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &mut App, - ) -> Option; -} + ) -> Option> { + let project_state = self.projects.get_mut(&project.entity_id())?; + if let Some(position) = position + && let Some(buffer) = project_state + .registered_buffers + .get_mut(&buffer.entity_id()) + { + buffer.last_position = Some(position); + } -impl EditPredictionProviderHandle for Entity -where - T: EditPredictionProvider, -{ - fn name(&self) -> &'static str { - T::name() + let CurrentEditPrediction { + requested_by, + prediction, + .. 
+ } = project_state.current_prediction.as_ref()?; + + if prediction.targets_buffer(buffer.read(cx)) { + Some(BufferEditPrediction::Local { prediction }) + } else { + let show_jump = match requested_by { + PredictionRequestedBy::Buffer(requested_by_buffer_id) => { + requested_by_buffer_id == &buffer.entity_id() + } + PredictionRequestedBy::DiagnosticsUpdate => true, + }; + + if show_jump { + Some(BufferEditPrediction::Jump { prediction }) + } else { + None + } + } } - fn display_name(&self) -> &'static str { - T::display_name() + fn accept_current_prediction(&mut self, project: &Entity, cx: &mut Context) { + let custom_accept_url = env::var("ZED_ACCEPT_PREDICTION_URL").ok(); + match self.edit_prediction_model { + EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + if self.custom_predict_edits_url.is_some() && custom_accept_url.is_none() { + return; + } + } + EditPredictionModel::Sweep | EditPredictionModel::Mercury => return, + } + + let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { + return; + }; + + let Some(prediction) = project_state.current_prediction.take() else { + return; + }; + let request_id = prediction.prediction.id.to_string(); + for pending_prediction in mem::take(&mut project_state.pending_predictions) { + project_state.cancel_pending_prediction(pending_prediction, cx); + } + + let client = self.client.clone(); + let llm_token = self.llm_token.clone(); + let app_version = AppVersion::global(cx); + cx.spawn(async move |this, cx| { + let (url, require_auth) = if let Some(accept_edits_url) = custom_accept_url { + (http_client::Url::parse(&accept_edits_url)?, false) + } else { + ( + client + .http_client() + .build_zed_llm_url("/predict_edits/accept", &[])?, + true, + ) + }; + + let response = cx + .background_spawn(Self::send_api_request::<()>( + move |builder| { + let req = builder.uri(url.as_ref()).body( + serde_json::to_string(&AcceptEditPredictionBody { + request_id: request_id.clone(), + })? + .into(), + ); + Ok(req?) + }, + client, + llm_token, + app_version, + require_auth, + )) + .await; + + Self::handle_api_response(&this, response, cx)?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + async fn handle_rejected_predictions( + rx: UnboundedReceiver, + client: Arc, + llm_token: LlmApiToken, + app_version: Version, + background_executor: BackgroundExecutor, + ) { + let mut rx = std::pin::pin!(rx.peekable()); + let mut batched = Vec::new(); + + while let Some(rejection) = rx.next().await { + batched.push(rejection); + + if batched.len() < MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST / 2 { + select_biased! { + next = rx.as_mut().peek().fuse() => { + if next.is_some() { + continue; + } + } + () = background_executor.timer(REJECT_REQUEST_DEBOUNCE).fuse() => {}, + } + } + + let url = client + .http_client() + .build_zed_llm_url("/predict_edits/reject", &[]) + .unwrap(); + + let flush_count = batched + .len() + // in case items have accumulated after failure + .min(MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST); + let start = batched.len() - flush_count; + + let body = RejectEditPredictionsBodyRef { + rejections: &batched[start..], + }; + + let result = Self::send_api_request::<()>( + |builder| { + let req = builder + .uri(url.as_ref()) + .body(serde_json::to_string(&body)?.into()); + anyhow::Ok(req?) 
+ }, + client.clone(), + llm_token.clone(), + app_version.clone(), + true, + ) + .await; + + if result.log_err().is_some() { + batched.drain(start..); + } + } } - fn show_completions_in_menu(&self) -> bool { - T::show_completions_in_menu() + fn reject_current_prediction( + &mut self, + reason: EditPredictionRejectReason, + project: &Entity, + ) { + if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { + project_state.pending_predictions.clear(); + if let Some(prediction) = project_state.current_prediction.take() { + self.reject_prediction(prediction.prediction.id, reason, prediction.was_shown); + } + }; } - fn show_tab_accept_marker(&self) -> bool { - T::show_tab_accept_marker() + fn did_show_current_prediction(&mut self, project: &Entity, _cx: &mut Context) { + if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { + if let Some(current_prediction) = project_state.current_prediction.as_mut() { + if !current_prediction.was_shown { + current_prediction.was_shown = true; + self.shown_predictions + .push_front(current_prediction.prediction.clone()); + if self.shown_predictions.len() > 50 { + let completion = self.shown_predictions.pop_back().unwrap(); + self.rated_predictions.remove(&completion.id); + } + } + } + } } - fn supports_jump_to_edit(&self) -> bool { - T::supports_jump_to_edit() + fn reject_prediction( + &mut self, + prediction_id: EditPredictionId, + reason: EditPredictionRejectReason, + was_shown: bool, + ) { + match self.edit_prediction_model { + EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + if self.custom_predict_edits_url.is_some() { + return; + } + } + EditPredictionModel::Sweep | EditPredictionModel::Mercury => return, + } + + self.reject_predictions_tx + .unbounded_send(EditPredictionRejection { + request_id: prediction_id.to_string(), + reason, + was_shown, + }) + .log_err(); } - fn data_collection_state(&self, cx: &App) -> DataCollectionState { - self.read(cx).data_collection_state(cx) + fn is_refreshing(&self, project: &Entity) -> bool { + self.projects + .get(&project.entity_id()) + .is_some_and(|project_state| !project_state.pending_predictions.is_empty()) } - fn usage(&self, cx: &App) -> Option { - self.read(cx).usage(cx) + pub fn refresh_prediction_from_buffer( + &mut self, + project: Entity, + buffer: Entity, + position: language::Anchor, + cx: &mut Context, + ) { + self.queue_prediction_refresh(project.clone(), buffer.entity_id(), cx, move |this, cx| { + let Some(request_task) = this + .update(cx, |this, cx| { + this.request_prediction( + &project, + &buffer, + position, + PredictEditsRequestTrigger::Other, + cx, + ) + }) + .log_err() + else { + return Task::ready(anyhow::Ok(None)); + }; + + cx.spawn(async move |_cx| { + request_task.await.map(|prediction_result| { + prediction_result.map(|prediction_result| { + ( + prediction_result, + PredictionRequestedBy::Buffer(buffer.entity_id()), + ) + }) + }) + }) + }) } - fn toggle_data_collection(&self, cx: &mut App) { - self.update(cx, |this, cx| this.toggle_data_collection(cx)) + pub fn refresh_prediction_from_diagnostics( + &mut self, + project: Entity, + cx: &mut Context, + ) { + let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { + return; + }; + + // Prefer predictions from buffer + if project_state.current_prediction.is_some() { + return; + }; + + self.queue_prediction_refresh(project.clone(), project.entity_id(), cx, move |this, cx| { + let Some((active_buffer, snapshot, cursor_point)) = this + .read_with(cx, |this, cx| { + let 
project_state = this.projects.get(&project.entity_id())?; + let (buffer, position) = project_state.active_buffer(&project, cx)?; + let snapshot = buffer.read(cx).snapshot(); + + if !Self::predictions_enabled_at(&snapshot, position, cx) { + return None; + } + + let cursor_point = position + .map(|pos| pos.to_point(&snapshot)) + .unwrap_or_default(); + + Some((buffer, snapshot, cursor_point)) + }) + .log_err() + .flatten() + else { + return Task::ready(anyhow::Ok(None)); + }; + + cx.spawn(async move |cx| { + let Some((jump_buffer, jump_position)) = Self::next_diagnostic_location( + active_buffer, + &snapshot, + Default::default(), + cursor_point, + &project, + cx, + ) + .await? + else { + return anyhow::Ok(None); + }; + + let Some(prediction_result) = this + .update(cx, |this, cx| { + this.request_prediction( + &project, + &jump_buffer, + jump_position, + PredictEditsRequestTrigger::Diagnostics, + cx, + ) + })? + .await? + else { + return anyhow::Ok(None); + }; + + this.update(cx, |this, cx| { + Some(( + if this + .get_or_init_project(&project, cx) + .current_prediction + .is_none() + { + prediction_result + } else { + EditPredictionResult { + id: prediction_result.id, + prediction: Err(EditPredictionRejectReason::CurrentPreferred), + } + }, + PredictionRequestedBy::DiagnosticsUpdate, + )) + }) + }) + }); } - fn is_enabled( - &self, - buffer: &Entity, - cursor_position: language::Anchor, + fn predictions_enabled_at( + snapshot: &BufferSnapshot, + position: Option, cx: &App, ) -> bool { - self.read(cx).is_enabled(buffer, cursor_position, cx) - } + let file = snapshot.file(); + let all_settings = all_language_settings(file, cx); + if !all_settings.show_edit_predictions(snapshot.language(), cx) + || file.is_some_and(|file| !all_settings.edit_predictions_enabled_for_file(file, cx)) + { + return false; + } + + if let Some(last_position) = position { + let settings = snapshot.settings_at(last_position, cx); + + if !settings.edit_predictions_disabled_in.is_empty() + && let Some(scope) = snapshot.language_scope_at(last_position) + && let Some(scope_name) = scope.override_name() + && settings + .edit_predictions_disabled_in + .iter() + .any(|s| s == scope_name) + { + return false; + } + } - fn is_refreshing(&self, cx: &App) -> bool { - self.read(cx).is_refreshing() + true } - fn refresh( - &self, - buffer: Entity, - cursor_position: language::Anchor, - debounce: bool, - cx: &mut App, + #[cfg(not(test))] + pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300); + #[cfg(test)] + pub const THROTTLE_TIMEOUT: Duration = Duration::ZERO; + + fn queue_prediction_refresh( + &mut self, + project: Entity, + throttle_entity: EntityId, + cx: &mut Context, + do_refresh: impl FnOnce( + WeakEntity, + &mut AsyncApp, + ) + -> Task>> + + 'static, ) { - self.update(cx, |this, cx| { - this.refresh(buffer, cursor_position, debounce, cx) - }) + let project_state = self.get_or_init_project(&project, cx); + let pending_prediction_id = project_state.next_pending_prediction_id; + project_state.next_pending_prediction_id += 1; + let last_request = project_state.last_prediction_refresh; + + let task = cx.spawn(async move |this, cx| { + if let Some((last_entity, last_timestamp)) = last_request + && throttle_entity == last_entity + && let Some(timeout) = + (last_timestamp + Self::THROTTLE_TIMEOUT).checked_duration_since(Instant::now()) + { + cx.background_executor().timer(timeout).await; + } + + // If this task was cancelled before the throttle timeout expired, + // do not perform a request. 
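+            // (`is_cancelled` defaults to true so that a cancelled prediction or a
+            // failed entity update both short-circuit without issuing a request.)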
+ let mut is_cancelled = true; + this.update(cx, |this, cx| { + let project_state = this.get_or_init_project(&project, cx); + if !project_state + .cancelled_predictions + .remove(&pending_prediction_id) + { + project_state.last_prediction_refresh = Some((throttle_entity, Instant::now())); + is_cancelled = false; + } + }) + .ok(); + if is_cancelled { + return None; + } + + let new_prediction_result = do_refresh(this.clone(), cx).await.log_err().flatten(); + let new_prediction_id = new_prediction_result + .as_ref() + .map(|(prediction, _)| prediction.id.clone()); + + // When a prediction completes, remove it from the pending list, and cancel + // any pending predictions that were enqueued before it. + this.update(cx, |this, cx| { + let project_state = this.get_or_init_project(&project, cx); + + let is_cancelled = project_state + .cancelled_predictions + .remove(&pending_prediction_id); + + let new_current_prediction = if !is_cancelled + && let Some((prediction_result, requested_by)) = new_prediction_result + { + match prediction_result.prediction { + Ok(prediction) => { + let new_prediction = CurrentEditPrediction { + requested_by, + prediction, + was_shown: false, + }; + + if let Some(current_prediction) = + project_state.current_prediction.as_ref() + { + if new_prediction.should_replace_prediction(¤t_prediction, cx) + { + this.reject_current_prediction( + EditPredictionRejectReason::Replaced, + &project, + ); + + Some(new_prediction) + } else { + this.reject_prediction( + new_prediction.prediction.id, + EditPredictionRejectReason::CurrentPreferred, + false, + ); + None + } + } else { + Some(new_prediction) + } + } + Err(reject_reason) => { + this.reject_prediction(prediction_result.id, reject_reason, false); + None + } + } + } else { + None + }; + + let project_state = this.get_or_init_project(&project, cx); + + if let Some(new_prediction) = new_current_prediction { + project_state.current_prediction = Some(new_prediction); + } + + let mut pending_predictions = mem::take(&mut project_state.pending_predictions); + for (ix, pending_prediction) in pending_predictions.iter().enumerate() { + if pending_prediction.id == pending_prediction_id { + pending_predictions.remove(ix); + for pending_prediction in pending_predictions.drain(0..ix) { + project_state.cancel_pending_prediction(pending_prediction, cx) + } + break; + } + } + this.get_or_init_project(&project, cx).pending_predictions = pending_predictions; + cx.notify(); + }) + .ok(); + + new_prediction_id + }); + + if project_state.pending_predictions.len() <= 1 { + project_state.pending_predictions.push(PendingPrediction { + id: pending_prediction_id, + task, + }); + } else if project_state.pending_predictions.len() == 2 { + let pending_prediction = project_state.pending_predictions.pop().unwrap(); + project_state.pending_predictions.push(PendingPrediction { + id: pending_prediction_id, + task, + }); + project_state.cancel_pending_prediction(pending_prediction, cx); + } } - fn cycle( - &self, - buffer: Entity, - cursor_position: language::Anchor, - direction: Direction, - cx: &mut App, - ) { - self.update(cx, |this, cx| { - this.cycle(buffer, cursor_position, direction, cx) + pub fn request_prediction( + &mut self, + project: &Entity, + active_buffer: &Entity, + position: language::Anchor, + trigger: PredictEditsRequestTrigger, + cx: &mut Context, + ) -> Task>> { + self.request_prediction_internal( + project.clone(), + active_buffer.clone(), + position, + trigger, + cx.has_flag::(), + cx, + ) + } + + fn request_prediction_internal( + &mut 
self, + project: Entity, + active_buffer: Entity, + position: language::Anchor, + trigger: PredictEditsRequestTrigger, + allow_jump: bool, + cx: &mut Context, + ) -> Task>> { + const DIAGNOSTIC_LINES_RANGE: u32 = 20; + + self.get_or_init_project(&project, cx); + let project_state = self.projects.get(&project.entity_id()).unwrap(); + let events = project_state.events(cx); + let has_events = !events.is_empty(); + let debug_tx = project_state.debug_tx.clone(); + + let snapshot = active_buffer.read(cx).snapshot(); + let cursor_point = position.to_point(&snapshot); + let diagnostic_search_start = cursor_point.row.saturating_sub(DIAGNOSTIC_LINES_RANGE); + let diagnostic_search_end = cursor_point.row + DIAGNOSTIC_LINES_RANGE; + let diagnostic_search_range = + Point::new(diagnostic_search_start, 0)..Point::new(diagnostic_search_end, 0); + + let related_files = if self.use_context { + self.context_for_project(&project, cx) + } else { + Vec::new().into() + }; + + let inputs = EditPredictionModelInput { + project: project.clone(), + buffer: active_buffer.clone(), + snapshot: snapshot.clone(), + position, + events, + related_files, + recent_paths: project_state.recent_paths.clone(), + trigger, + diagnostic_search_range: diagnostic_search_range.clone(), + debug_tx, + }; + + let task = match self.edit_prediction_model { + EditPredictionModel::Zeta1 => zeta1::request_prediction_with_zeta1(self, inputs, cx), + EditPredictionModel::Zeta2 => zeta2::request_prediction_with_zeta2(self, inputs, cx), + EditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(inputs, cx), + EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), + }; + + cx.spawn(async move |this, cx| { + let prediction = task.await?; + + if prediction.is_none() && allow_jump { + let cursor_point = position.to_point(&snapshot); + if has_events + && let Some((jump_buffer, jump_position)) = Self::next_diagnostic_location( + active_buffer.clone(), + &snapshot, + diagnostic_search_range, + cursor_point, + &project, + cx, + ) + .await? + { + return this + .update(cx, |this, cx| { + this.request_prediction_internal( + project, + jump_buffer, + jump_position, + trigger, + false, + cx, + ) + })? 
+ .await; + } + + return anyhow::Ok(None); + } + + Ok(prediction) }) } - fn accept(&self, cx: &mut App) { - self.update(cx, |this, cx| this.accept(cx)) + async fn next_diagnostic_location( + active_buffer: Entity, + active_buffer_snapshot: &BufferSnapshot, + active_buffer_diagnostic_search_range: Range, + active_buffer_cursor_point: Point, + project: &Entity, + cx: &mut AsyncApp, + ) -> Result, language::Anchor)>> { + // find the closest diagnostic to the cursor that wasn't close enough to be included in the last request + let mut jump_location = active_buffer_snapshot + .diagnostic_groups(None) + .into_iter() + .filter_map(|(_, group)| { + let range = &group.entries[group.primary_ix] + .range + .to_point(&active_buffer_snapshot); + if range.overlaps(&active_buffer_diagnostic_search_range) { + None + } else { + Some(range.start) + } + }) + .min_by_key(|probe| probe.row.abs_diff(active_buffer_cursor_point.row)) + .map(|position| { + ( + active_buffer.clone(), + active_buffer_snapshot.anchor_before(position), + ) + }); + + if jump_location.is_none() { + let active_buffer_path = active_buffer.read_with(cx, |buffer, cx| { + let file = buffer.file()?; + + Some(ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path().clone(), + }) + })?; + + let buffer_task = project.update(cx, |project, cx| { + let (path, _, _) = project + .diagnostic_summaries(false, cx) + .filter(|(path, _, _)| Some(path) != active_buffer_path.as_ref()) + .max_by_key(|(path, _, _)| { + // find the buffer with errors that shares most parent directories + path.path + .components() + .zip( + active_buffer_path + .as_ref() + .map(|p| p.path.components()) + .unwrap_or_default(), + ) + .take_while(|(a, b)| a == b) + .count() + })?; + + Some(project.open_buffer(path, cx)) + })?; + + if let Some(buffer_task) = buffer_task { + let closest_buffer = buffer_task.await?; + + jump_location = closest_buffer + .read_with(cx, |buffer, _cx| { + buffer + .buffer_diagnostics(None) + .into_iter() + .min_by_key(|entry| entry.diagnostic.severity) + .map(|entry| entry.range.start) + })? + .map(|position| (closest_buffer, position)); + } + } + + anyhow::Ok(jump_location) + } + + async fn send_raw_llm_request( + request: open_ai::Request, + client: Arc, + llm_token: LlmApiToken, + app_version: Version, + #[cfg(feature = "cli-support")] eval_cache: Option>, + #[cfg(feature = "cli-support")] eval_cache_kind: EvalCacheEntryKind, + ) -> Result<(open_ai::Response, Option)> { + let url = client + .http_client() + .build_zed_llm_url("/predict_edits/raw", &[])?; + + #[cfg(feature = "cli-support")] + let cache_key = if let Some(cache) = eval_cache { + use collections::FxHasher; + use std::hash::{Hash, Hasher}; + + let mut hasher = FxHasher::default(); + url.hash(&mut hasher); + let request_str = serde_json::to_string_pretty(&request)?; + request_str.hash(&mut hasher); + let hash = hasher.finish(); + + let key = (eval_cache_kind, hash); + if let Some(response_str) = cache.read(key) { + return Ok((serde_json::from_str(&response_str)?, None)); + } + + Some((cache, request_str, key)) + } else { + None + }; + + let (response, usage) = Self::send_api_request( + |builder| { + let req = builder + .uri(url.as_ref()) + .body(serde_json::to_string(&request)?.into()); + Ok(req?) 
+ }, + client, + llm_token, + app_version, + true, + ) + .await?; + + #[cfg(feature = "cli-support")] + if let Some((cache, request, key)) = cache_key { + cache.write(key, &request, &serde_json::to_string_pretty(&response)?); + } + + Ok((response, usage)) } - fn discard(&self, cx: &mut App) { - self.update(cx, |this, cx| this.discard(cx)) + fn handle_api_response( + this: &WeakEntity, + response: Result<(T, Option)>, + cx: &mut gpui::AsyncApp, + ) -> Result { + match response { + Ok((data, usage)) => { + if let Some(usage) = usage { + this.update(cx, |this, cx| { + this.user_store.update(cx, |user_store, cx| { + user_store.update_edit_prediction_usage(usage, cx); + }); + }) + .ok(); + } + Ok(data) + } + Err(err) => { + if err.is::() { + cx.update(|cx| { + this.update(cx, |this, _cx| { + this.update_required = true; + }) + .ok(); + + let error_message: SharedString = err.to_string().into(); + show_app_notification( + NotificationId::unique::(), + cx, + move |cx| { + cx.new(|cx| { + ErrorMessagePrompt::new(error_message.clone(), cx) + .with_link_button("Update Zed", "https://zed.dev/releases") + }) + }, + ); + }) + .ok(); + } + Err(err) + } + } } - fn suggest( - &self, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &mut App, - ) -> Option { - self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx)) - } -} - -/// Returns edits updated based on user edits since the old snapshot. None is returned if any user -/// edit is not a prefix of a predicted insertion. -pub fn interpolate_edits( - old_snapshot: &BufferSnapshot, - new_snapshot: &BufferSnapshot, - current_edits: &[(Range, Arc)], -) -> Option, Arc)>> { - let mut edits = Vec::new(); - - let mut model_edits = current_edits.iter().peekable(); - for user_edit in new_snapshot.edits_since::(&old_snapshot.version) { - while let Some((model_old_range, _)) = model_edits.peek() { - let model_old_range = model_old_range.to_offset(old_snapshot); - if model_old_range.end < user_edit.old.start { - let (model_old_range, model_new_text) = model_edits.next().unwrap(); - edits.push((model_old_range.clone(), model_new_text.clone())); + async fn send_api_request( + build: impl Fn(http_client::http::request::Builder) -> Result>, + client: Arc, + llm_token: LlmApiToken, + app_version: Version, + require_auth: bool, + ) -> Result<(Res, Option)> + where + Res: DeserializeOwned, + { + let http_client = client.http_client(); + + let mut token = if require_auth { + Some(llm_token.acquire(&client).await?) 
+ } else { + llm_token.acquire(&client).await.ok() + }; + let mut did_retry = false; + + loop { + let request_builder = http_client::Request::builder().method(Method::POST); + + let mut request_builder = request_builder + .header("Content-Type", "application/json") + .header(ZED_VERSION_HEADER_NAME, app_version.to_string()); + + // Only add Authorization header if we have a token + if let Some(ref token_value) = token { + request_builder = + request_builder.header("Authorization", format!("Bearer {}", token_value)); + } + + let request = build(request_builder)?; + + let mut response = http_client.send(request).await?; + + if let Some(minimum_required_version) = response + .headers() + .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME) + .and_then(|version| Version::from_str(version.to_str().ok()?).ok()) + { + anyhow::ensure!( + app_version >= minimum_required_version, + ZedUpdateRequiredError { + minimum_version: minimum_required_version + } + ); + } + + if response.status().is_success() { + let usage = EditPredictionUsage::from_headers(response.headers()).ok(); + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + return Ok((serde_json::from_slice(&body)?, usage)); + } else if !did_retry + && token.is_some() + && response + .headers() + .get(EXPIRED_LLM_TOKEN_HEADER_NAME) + .is_some() + { + did_retry = true; + token = Some(llm_token.refresh(&client).await?); } else { - break; + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!( + "Request failed with status: {:?}\nBody: {}", + response.status(), + body + ); } } + } + + pub fn refresh_context( + &mut self, + project: &Entity, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut Context, + ) { + if self.use_context { + self.get_or_init_project(project, cx) + .context + .update(cx, |store, cx| { + store.refresh(buffer.clone(), cursor_position, cx); + }); + } + } - if let Some((model_old_range, model_new_text)) = model_edits.peek() { - let model_old_offset_range = model_old_range.to_offset(old_snapshot); - if user_edit.old == model_old_offset_range { - let user_new_text = new_snapshot - .text_for_range(user_edit.new.clone()) - .collect::(); + #[cfg(feature = "cli-support")] + pub fn set_context_for_buffer( + &mut self, + project: &Entity, + related_files: Vec, + cx: &mut Context, + ) { + self.get_or_init_project(project, cx) + .context + .update(cx, |store, _| { + store.set_related_files(related_files); + }); + } - if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) { - if !model_suffix.is_empty() { - let anchor = old_snapshot.anchor_after(user_edit.old.end); - edits.push((anchor..anchor, model_suffix.into())); - } + fn is_file_open_source( + &self, + project: &Entity, + file: &Arc, + cx: &App, + ) -> bool { + if !file.is_local() || file.is_private() { + return false; + } + let Some(project_state) = self.projects.get(&project.entity_id()) else { + return false; + }; + project_state + .license_detection_watchers + .get(&file.worktree_id(cx)) + .as_ref() + .is_some_and(|watcher| watcher.is_project_open_source()) + } + + fn can_collect_file(&self, project: &Entity, file: &Arc, cx: &App) -> bool { + self.data_collection_choice.is_enabled() && self.is_file_open_source(project, file, cx) + } - model_edits.next(); - continue; + fn can_collect_events(&self, events: &[Arc]) -> bool { + if !self.data_collection_choice.is_enabled() { + return false; + } + events.iter().all(|event| { + matches!( + event.as_ref(), + 
zeta_prompt::Event::BufferChange { + in_open_source_repo: true, + .. } + ) + }) + } + + fn load_data_collection_choice() -> DataCollectionChoice { + let choice = KEY_VALUE_STORE + .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE) + .log_err() + .flatten(); + + match choice.as_deref() { + Some("true") => DataCollectionChoice::Enabled, + Some("false") => DataCollectionChoice::Disabled, + Some(_) => { + log::error!("unknown value in '{ZED_PREDICT_DATA_COLLECTION_CHOICE}'"); + DataCollectionChoice::NotAnswered } + None => DataCollectionChoice::NotAnswered, + } + } + + fn toggle_data_collection_choice(&mut self, cx: &mut Context) { + self.data_collection_choice = self.data_collection_choice.toggle(); + let new_choice = self.data_collection_choice; + db::write_and_log(cx, move || { + KEY_VALUE_STORE.write_kvp( + ZED_PREDICT_DATA_COLLECTION_CHOICE.into(), + new_choice.is_enabled().to_string(), + ) + }); + } + + pub fn shown_predictions(&self) -> impl DoubleEndedIterator { + self.shown_predictions.iter() + } + + pub fn shown_completions_len(&self) -> usize { + self.shown_predictions.len() + } + + pub fn is_prediction_rated(&self, id: &EditPredictionId) -> bool { + self.rated_predictions.contains(id) + } + + pub fn rate_prediction( + &mut self, + prediction: &EditPrediction, + rating: EditPredictionRating, + feedback: String, + cx: &mut Context, + ) { + self.rated_predictions.insert(prediction.id.clone()); + telemetry::event!( + "Edit Prediction Rated", + rating, + inputs = prediction.inputs, + output = prediction.edit_preview.as_unified_diff(&prediction.edits), + feedback + ); + self.client.telemetry().flush_events().detach(); + cx.notify(); + } + + fn configure_context_retrieval(&mut self, cx: &mut Context<'_, EditPredictionStore>) { + self.use_context = cx.has_flag::() + && all_language_settings(None, cx).edit_predictions.use_context; + } +} + +#[derive(Error, Debug)] +#[error( + "You must update to Zed version {minimum_version} or higher to continue using edit predictions." 
+)] +pub struct ZedUpdateRequiredError { + minimum_version: Version, +} + +#[cfg(feature = "cli-support")] +pub type EvalCacheKey = (EvalCacheEntryKind, u64); + +#[cfg(feature = "cli-support")] +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum EvalCacheEntryKind { + Context, + Search, + Prediction, +} + +#[cfg(feature = "cli-support")] +impl std::fmt::Display for EvalCacheEntryKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + EvalCacheEntryKind::Search => write!(f, "search"), + EvalCacheEntryKind::Context => write!(f, "context"), + EvalCacheEntryKind::Prediction => write!(f, "prediction"), } + } +} + +#[cfg(feature = "cli-support")] +pub trait EvalCache: Send + Sync { + fn read(&self, key: EvalCacheKey) -> Option; + fn write(&self, key: EvalCacheKey, input: &str, value: &str); +} + +#[derive(Debug, Clone, Copy)] +pub enum DataCollectionChoice { + NotAnswered, + Enabled, + Disabled, +} + +impl DataCollectionChoice { + pub fn is_enabled(self) -> bool { + match self { + Self::Enabled => true, + Self::NotAnswered | Self::Disabled => false, + } + } + + pub fn is_answered(self) -> bool { + match self { + Self::Enabled | Self::Disabled => true, + Self::NotAnswered => false, + } + } + + #[must_use] + pub fn toggle(&self) -> DataCollectionChoice { + match self { + Self::Enabled => Self::Disabled, + Self::Disabled => Self::Enabled, + Self::NotAnswered => Self::Enabled, + } + } +} - return None; +impl From for DataCollectionChoice { + fn from(value: bool) -> Self { + match value { + true => DataCollectionChoice::Enabled, + false => DataCollectionChoice::Disabled, + } } +} + +struct ZedPredictUpsell; + +impl Dismissable for ZedPredictUpsell { + const KEY: &'static str = "dismissed-edit-predict-upsell"; + + fn dismissed() -> bool { + // To make this backwards compatible with older versions of Zed, we + // check if the user has seen the previous Edit Prediction Onboarding + // before, by checking the data collection choice which was written to + // the database once the user clicked on "Accept and Enable" + if KEY_VALUE_STORE + .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE) + .log_err() + .is_some_and(|s| s.is_some()) + { + return true; + } + + KEY_VALUE_STORE + .read_kvp(Self::KEY) + .log_err() + .is_some_and(|s| s.is_some()) + } +} + +pub fn should_show_upsell_modal() -> bool { + !ZedPredictUpsell::dismissed() +} - edits.extend(model_edits.cloned()); +pub fn init(cx: &mut App) { + cx.observe_new(move |workspace: &mut Workspace, _, _cx| { + workspace.register_action( + move |workspace, _: &zed_actions::OpenZedPredictOnboarding, window, cx| { + ZedPredictModal::toggle( + workspace, + workspace.user_store().clone(), + workspace.client().clone(), + window, + cx, + ) + }, + ); - if edits.is_empty() { None } else { Some(edits) } + workspace.register_action(|workspace, _: &ResetOnboarding, _window, cx| { + update_settings_file(workspace.app_state().fs.clone(), cx, move |settings, _| { + settings + .project + .all_languages + .features + .get_or_insert_default() + .edit_prediction_provider = Some(EditPredictionProvider::None) + }); + }); + }) + .detach(); } diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..eee3f1f79e93b60ee3ea7c80bd987af22d613833 --- /dev/null +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -0,0 +1,2088 @@ +use super::*; +use crate::{udiff::apply_diff_to_string, zeta1::MAX_EVENT_TOKENS}; +use 
client::{UserStore, test::FakeServer}; +use clock::{FakeSystemClock, ReplicaId}; +use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; +use cloud_llm_client::{ + EditPredictionRejectReason, EditPredictionRejection, PredictEditsBody, PredictEditsResponse, + RejectEditPredictionsBody, +}; +use futures::{ + AsyncReadExt, StreamExt, + channel::{mpsc, oneshot}, +}; +use gpui::{ + Entity, TestAppContext, + http_client::{FakeHttpClient, Response}, +}; +use indoc::indoc; +use language::{Point, ToOffset as _}; +use lsp::LanguageServerId; +use open_ai::Usage; +use parking_lot::Mutex; +use pretty_assertions::{assert_eq, assert_matches}; +use project::{FakeFs, Project}; +use serde_json::json; +use settings::SettingsStore; +use std::{path::Path, sync::Arc, time::Duration}; +use util::{path, rel_path::rel_path}; +use uuid::Uuid; +use zeta_prompt::ZetaPromptInput; + +use crate::{BufferEditPrediction, EditPredictionId, EditPredictionStore, REJECT_REQUEST_DEBOUNCE}; + +#[gpui::test] +async fn test_current_state(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "1.txt": "Hello!\nHow\nBye\n", + "2.txt": "Hola!\nComo\nAdios\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer1 = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("/root/1.txt"), cx).unwrap(); + project.set_active_path(Some(path.clone()), cx); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot1.anchor_before(language::Point::new(1, 3)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_project(&project, cx); + ep_store.register_buffer(&buffer1, &project, cx); + }); + + // Prediction for current file + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer1.clone(), position, cx) + }); + let (request, respond_tx) = requests.predict.next().await.unwrap(); + + respond_tx + .send(model_response( + request, + indoc! {r" + --- a/root/1.txt + +++ b/root/1.txt + @@ ... @@ + Hello! + -How + +How are you? + Bye + "}, + )) + .unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + let prediction = ep_store + .prediction_at(&buffer1, None, &project, cx) + .unwrap(); + assert_matches!(prediction, BufferEditPrediction::Local { .. }); + }); + + ep_store.update(cx, |ep_store, _cx| { + ep_store.reject_current_prediction(EditPredictionRejectReason::Discarded, &project); + }); + + // Prediction for diagnostic in another file + + let diagnostic = lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "Sentence is incomplete".to_string(), + ..Default::default() + }; + + project.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store + .update_diagnostics( + LanguageServerId(0), + lsp::PublishDiagnosticsParams { + uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(), + diagnostics: vec![diagnostic], + version: None, + }, + None, + language::DiagnosticSourceKind::Pushed, + &[], + cx, + ) + .unwrap(); + }); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + respond_tx + .send(model_response( + request, + indoc! {r#" + --- a/root/2.txt + +++ b/root/2.txt + @@ ... @@ + Hola! 
+ -Como + +Como estas? + Adios + "#}, + )) + .unwrap(); + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + let prediction = ep_store + .prediction_at(&buffer1, None, &project, cx) + .unwrap(); + assert_matches!( + prediction, + BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt")) + ); + }); + + let buffer2 = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/2.txt"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + ep_store.update(cx, |ep_store, cx| { + let prediction = ep_store + .prediction_at(&buffer2, None, &project, cx) + .unwrap(); + assert_matches!(prediction, BufferEditPrediction::Local { .. }); + }); +} + +#[gpui::test] +async fn test_simple_request(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + let prediction_task = ep_store.update(cx, |ep_store, cx| { + ep_store.request_prediction(&project, &buffer, position, Default::default(), cx) + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + + // TODO Put back when we have a structured request again + // assert_eq!( + // request.excerpt_path.as_ref(), + // Path::new(path!("root/foo.md")) + // ); + // assert_eq!( + // request.cursor_point, + // Point { + // line: Line(1), + // column: 3 + // } + // ); + + respond_tx + .send(model_response( + request, + indoc! { r" + --- a/root/foo.md + +++ b/root/foo.md + @@ ... @@ + Hello! + -How + +How are you? 
+ Bye + "}, + )) + .unwrap(); + + let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap(); + + assert_eq!(prediction.edits.len(), 1); + assert_eq!( + prediction.edits[0].0.to_point(&snapshot).start, + language::Point::new(1, 3) + ); + assert_eq!(prediction.edits[0].1.as_ref(), " are you?"); +} + +#[gpui::test] +async fn test_request_events(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\n\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + }); + + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(7..7, "How")], None, cx); + }); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + let prediction_task = ep_store.update(cx, |ep_store, cx| { + ep_store.request_prediction(&project, &buffer, position, Default::default(), cx) + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + + let prompt = prompt_from_request(&request); + assert!( + prompt.contains(indoc! {" + --- a/root/foo.md + +++ b/root/foo.md + @@ -1,3 +1,3 @@ + Hello! + - + +How + Bye + "}), + "{prompt}" + ); + + respond_tx + .send(model_response( + request, + indoc! {r#" + --- a/root/foo.md + +++ b/root/foo.md + @@ ... @@ + Hello! + -How + +How are you? + Bye + "#}, + )) + .unwrap(); + + let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap(); + + assert_eq!(prediction.edits.len(), 1); + assert_eq!(prediction.edits[0].1.as_ref(), " are you?"); +} + +#[gpui::test] +async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContext) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\n\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + }); + + // First burst: insert "How" + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(7..7, "How")], None, cx); + }); + + // Simulate a pause longer than the grouping threshold (e.g. 500ms). + cx.executor().advance_clock(LAST_CHANGE_GROUPING_TIME * 2); + cx.run_until_parked(); + + // Second burst: append " are you?" immediately after "How" on the same line. + // + // Keeping both bursts on the same line ensures the existing line-span coalescing logic + // groups them into a single `LastEvent`, allowing the pause-split getter to return two diffs. + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(10..10, " are you?")], None, cx); + }); + + // A second edit shortly after the first post-pause edit ensures the last edit timestamp is + // advanced after the pause boundary is recorded, making pause-splitting deterministic. 
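+    // After this final edit the edit history still contains a single coalesced event;
+    // the pause-splitting getter below is expected to break it into two separate diffs.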
+ buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(19..19, "!")], None, cx); + }); + + // Without time-based splitting, there is one event. + let events = ep_store.update(cx, |ep_store, cx| { + ep_store.edit_history_for_project(&project, cx) + }); + assert_eq!(events.len(), 1); + let zeta_prompt::Event::BufferChange { diff, .. } = events[0].as_ref(); + assert_eq!( + diff.as_str(), + indoc! {" + @@ -1,3 +1,3 @@ + Hello! + - + +How are you?! + Bye + "} + ); + + // With time-based splitting, there are two distinct events. + let events = ep_store.update(cx, |ep_store, cx| { + ep_store.edit_history_for_project_with_pause_split_last_event(&project, cx) + }); + assert_eq!(events.len(), 2); + let zeta_prompt::Event::BufferChange { diff, .. } = events[0].as_ref(); + assert_eq!( + diff.as_str(), + indoc! {" + @@ -1,3 +1,3 @@ + Hello! + - + +How + Bye + "} + ); + + let zeta_prompt::Event::BufferChange { diff, .. } = events[1].as_ref(); + assert_eq!( + diff.as_str(), + indoc! {" + @@ -1,3 +1,3 @@ + Hello! + -How + +How are you?! + Bye + "} + ); +} + +#[gpui::test] +async fn test_empty_prediction(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + let response = model_response(request, ""); + let id = response.id.clone(); + respond_tx.send(response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + assert!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .is_none() + ); + }); + + // prediction is reported as rejected + let (reject_request, _) = requests.reject.next().await.unwrap(); + + assert_eq!( + &reject_request.rejections, + &[EditPredictionRejection { + request_id: id, + reason: EditPredictionRejectReason::Empty, + was_shown: false + }] + ); +} + +#[gpui::test] +async fn test_interpolated_empty(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + + buffer.update(cx, |buffer, cx| { + buffer.set_text("Hello!\nHow are you?\nBye", cx); + }); + + let response = model_response(request, SIMPLE_DIFF); + let id = 
response.id.clone(); + respond_tx.send(response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + assert!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .is_none() + ); + }); + + // prediction is reported as rejected + let (reject_request, _) = requests.reject.next().await.unwrap(); + + assert_eq!( + &reject_request.rejections, + &[EditPredictionRejection { + request_id: id, + reason: EditPredictionRejectReason::InterpolatedEmpty, + was_shown: false + }] + ); +} + +const SIMPLE_DIFF: &str = indoc! { r" + --- a/root/foo.md + +++ b/root/foo.md + @@ ... @@ + Hello! + -How + +How are you? + Bye +"}; + +#[gpui::test] +async fn test_replace_current(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + let first_response = model_response(request, SIMPLE_DIFF); + let first_id = first_response.id.clone(); + respond_tx.send(first_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + first_id + ); + }); + + // a second request is triggered + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + let second_response = model_response(request, SIMPLE_DIFF); + let second_id = second_response.id.clone(); + respond_tx.send(second_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // second replaces first + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + second_id + ); + }); + + // first is reported as replaced + let (reject_request, _) = requests.reject.next().await.unwrap(); + + assert_eq!( + &reject_request.rejections, + &[EditPredictionRejection { + request_id: first_id, + reason: EditPredictionRejectReason::Replaced, + was_shown: false + }] + ); +} + +#[gpui::test] +async fn test_current_preferred(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), 
buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + let first_response = model_response(request, SIMPLE_DIFF); + let first_id = first_response.id.clone(); + respond_tx.send(first_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + first_id + ); + }); + + // a second request is triggered + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + // worse than current prediction + let second_response = model_response( + request, + indoc! { r" + --- a/root/foo.md + +++ b/root/foo.md + @@ ... @@ + Hello! + -How + +How are + Bye + "}, + ); + let second_id = second_response.id.clone(); + respond_tx.send(second_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // first is preferred over second + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + first_id + ); + }); + + // second is reported as rejected + let (reject_request, _) = requests.reject.next().await.unwrap(); + + assert_eq!( + &reject_request.rejections, + &[EditPredictionRejection { + request_id: second_id, + reason: EditPredictionRejectReason::CurrentPreferred, + was_shown: false + }] + ); +} + +#[gpui::test] +async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + // start two refresh tasks + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request1, respond_first) = requests.predict.next().await.unwrap(); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_second) = requests.predict.next().await.unwrap(); + + // wait for throttle + cx.run_until_parked(); + + // second responds first + let second_response = model_response(request, SIMPLE_DIFF); + let second_id = second_response.id.clone(); + respond_second.send(second_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // current prediction is second + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + second_id + ); + }); + + let first_response = model_response(request1, SIMPLE_DIFF); + let first_id = first_response.id.clone(); + respond_first.send(first_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // current prediction is still second, since first was cancelled + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + second_id + ); + }); + + // first is reported as rejected + let 
(reject_request, _) = requests.reject.next().await.unwrap(); + + cx.run_until_parked(); + + assert_eq!( + &reject_request.rejections, + &[EditPredictionRejection { + request_id: first_id, + reason: EditPredictionRejectReason::Canceled, + was_shown: false + }] + ); +} + +#[gpui::test] +async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + // start two refresh tasks + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request1, respond_first) = requests.predict.next().await.unwrap(); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request2, respond_second) = requests.predict.next().await.unwrap(); + + // wait for throttle, so requests are sent + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // start a third request + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + + // 2 are pending, so 2nd is cancelled + assert_eq!( + ep_store + .get_or_init_project(&project, cx) + .cancelled_predictions + .iter() + .copied() + .collect::>(), + [1] + ); + }); + + // wait for throttle + cx.run_until_parked(); + + let (request3, respond_third) = requests.predict.next().await.unwrap(); + + let first_response = model_response(request1, SIMPLE_DIFF); + let first_id = first_response.id.clone(); + respond_first.send(first_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // current prediction is first + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + first_id + ); + }); + + let cancelled_response = model_response(request2, SIMPLE_DIFF); + let cancelled_id = cancelled_response.id.clone(); + respond_second.send(cancelled_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // current prediction is still first, since second was cancelled + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + first_id + ); + }); + + let third_response = model_response(request3, SIMPLE_DIFF); + let third_response_id = third_response.id.clone(); + respond_third.send(third_response).unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + // third completes and replaces first + assert_eq!( + ep_store + .prediction_at(&buffer, None, &project, cx) + .unwrap() + .id + .0, + third_response_id + ); + }); + + // second is reported as rejected + let (reject_request, _) = requests.reject.next().await.unwrap(); + + cx.run_until_parked(); + + assert_eq!( + &reject_request.rejections, + &[ + EditPredictionRejection { + request_id: cancelled_id, + reason: EditPredictionRejectReason::Canceled, + was_shown: false + }, + EditPredictionRejection { + request_id: first_id, + reason: 
EditPredictionRejectReason::Replaced, + was_shown: false + } + ] + ); +} + +#[gpui::test] +async fn test_rejections_flushing(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + + ep_store.update(cx, |ep_store, _cx| { + ep_store.reject_prediction( + EditPredictionId("test-1".into()), + EditPredictionRejectReason::Discarded, + false, + ); + ep_store.reject_prediction( + EditPredictionId("test-2".into()), + EditPredictionRejectReason::Canceled, + true, + ); + }); + + cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE); + cx.run_until_parked(); + + let (reject_request, respond_tx) = requests.reject.next().await.unwrap(); + respond_tx.send(()).unwrap(); + + // batched + assert_eq!(reject_request.rejections.len(), 2); + assert_eq!( + reject_request.rejections[0], + EditPredictionRejection { + request_id: "test-1".to_string(), + reason: EditPredictionRejectReason::Discarded, + was_shown: false + } + ); + assert_eq!( + reject_request.rejections[1], + EditPredictionRejection { + request_id: "test-2".to_string(), + reason: EditPredictionRejectReason::Canceled, + was_shown: true + } + ); + + // Reaching batch size limit sends without debounce + ep_store.update(cx, |ep_store, _cx| { + for i in 0..70 { + ep_store.reject_prediction( + EditPredictionId(format!("batch-{}", i).into()), + EditPredictionRejectReason::Discarded, + false, + ); + } + }); + + // First MAX/2 items are sent immediately + cx.run_until_parked(); + let (reject_request, respond_tx) = requests.reject.next().await.unwrap(); + respond_tx.send(()).unwrap(); + + assert_eq!(reject_request.rejections.len(), 50); + assert_eq!(reject_request.rejections[0].request_id, "batch-0"); + assert_eq!(reject_request.rejections[49].request_id, "batch-49"); + + // Remaining items are debounced with the next batch + cx.executor().advance_clock(Duration::from_secs(15)); + cx.run_until_parked(); + + let (reject_request, respond_tx) = requests.reject.next().await.unwrap(); + respond_tx.send(()).unwrap(); + + assert_eq!(reject_request.rejections.len(), 20); + assert_eq!(reject_request.rejections[0].request_id, "batch-50"); + assert_eq!(reject_request.rejections[19].request_id, "batch-69"); + + // Request failure + ep_store.update(cx, |ep_store, _cx| { + ep_store.reject_prediction( + EditPredictionId("retry-1".into()), + EditPredictionRejectReason::Discarded, + false, + ); + }); + + cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE); + cx.run_until_parked(); + + let (reject_request, _respond_tx) = requests.reject.next().await.unwrap(); + assert_eq!(reject_request.rejections.len(), 1); + assert_eq!(reject_request.rejections[0].request_id, "retry-1"); + // Simulate failure + drop(_respond_tx); + + // Add another rejection + ep_store.update(cx, |ep_store, _cx| { + ep_store.reject_prediction( + EditPredictionId("retry-2".into()), + EditPredictionRejectReason::Discarded, + false, + ); + }); + + cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE); + cx.run_until_parked(); + + // Retry should include both the failed item and the new one + let (reject_request, respond_tx) = requests.reject.next().await.unwrap(); + respond_tx.send(()).unwrap(); + + assert_eq!(reject_request.rejections.len(), 2); + assert_eq!(reject_request.rejections[0].request_id, "retry-1"); + assert_eq!(reject_request.rejections[1].request_id, "retry-2"); +} + +// Skipped until we start including diagnostics in prompt +// #[gpui::test] +// async fn test_request_diagnostics(cx: &mut TestAppContext) { +// let (ep_store, mut req_rx) = 
init_test_with_fake_client(cx); +// let fs = FakeFs::new(cx.executor()); +// fs.insert_tree( +// "/root", +// json!({ +// "foo.md": "Hello!\nBye" +// }), +// ) +// .await; +// let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + +// let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap(); +// let diagnostic = lsp::Diagnostic { +// range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)), +// severity: Some(lsp::DiagnosticSeverity::ERROR), +// message: "\"Hello\" deprecated. Use \"Hi\" instead".to_string(), +// ..Default::default() +// }; + +// project.update(cx, |project, cx| { +// project.lsp_store().update(cx, |lsp_store, cx| { +// // Create some diagnostics +// lsp_store +// .update_diagnostics( +// LanguageServerId(0), +// lsp::PublishDiagnosticsParams { +// uri: path_to_buffer_uri.clone(), +// diagnostics: vec![diagnostic], +// version: None, +// }, +// None, +// language::DiagnosticSourceKind::Pushed, +// &[], +// cx, +// ) +// .unwrap(); +// }); +// }); + +// let buffer = project +// .update(cx, |project, cx| { +// let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); +// project.open_buffer(path, cx) +// }) +// .await +// .unwrap(); + +// let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); +// let position = snapshot.anchor_before(language::Point::new(0, 0)); + +// let _prediction_task = ep_store.update(cx, |ep_store, cx| { +// ep_store.request_prediction(&project, &buffer, position, cx) +// }); + +// let (request, _respond_tx) = req_rx.next().await.unwrap(); + +// assert_eq!(request.diagnostic_groups.len(), 1); +// let value = serde_json::from_str::(request.diagnostic_groups[0].0.get()) +// .unwrap(); +// // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3 +// assert_eq!( +// value, +// json!({ +// "entries": [{ +// "range": { +// "start": 8, +// "end": 10 +// }, +// "diagnostic": { +// "source": null, +// "code": null, +// "code_description": null, +// "severity": 1, +// "message": "\"Hello\" deprecated. Use \"Hi\" instead", +// "markdown": null, +// "group_id": 0, +// "is_primary": true, +// "is_disk_based": false, +// "is_unnecessary": false, +// "source_kind": "Pushed", +// "data": null, +// "underline": true +// } +// }], +// "primary_ix": 0 +// }) +// ); +// } + +// Generate a model response that would apply the given diff to the active file. 
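+// The fake locates the excerpt between the excerpt delimiters in the prompt, strips the
+// <|user_cursor|> marker, applies `diff_to_apply` to it via `apply_diff_to_string`, and
+// returns the patched excerpt as the assistant message, mimicking what the real model is
+// expected to return.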
+fn model_response(request: open_ai::Request, diff_to_apply: &str) -> open_ai::Response { + let prompt = match &request.messages[0] { + open_ai::RequestMessage::User { + content: open_ai::MessageContent::Plain(content), + } => content, + _ => panic!("unexpected request {request:?}"), + }; + + let open = "\n"; + let close = ""; + let cursor = "<|user_cursor|>"; + + let start_ix = open.len() + prompt.find(open).unwrap(); + let end_ix = start_ix + &prompt[start_ix..].find(close).unwrap(); + let excerpt = prompt[start_ix..end_ix].replace(cursor, ""); + let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap(); + + open_ai::Response { + id: Uuid::new_v4().to_string(), + object: "response".into(), + created: 0, + model: "model".into(), + choices: vec![open_ai::Choice { + index: 0, + message: open_ai::RequestMessage::Assistant { + content: Some(open_ai::MessageContent::Plain(new_excerpt)), + tool_calls: vec![], + }, + finish_reason: None, + }], + usage: Usage { + prompt_tokens: 0, + completion_tokens: 0, + total_tokens: 0, + }, + } +} + +fn prompt_from_request(request: &open_ai::Request) -> &str { + assert_eq!(request.messages.len(), 1); + let open_ai::RequestMessage::User { + content: open_ai::MessageContent::Plain(content), + .. + } = &request.messages[0] + else { + panic!( + "Request does not have single user message of type Plain. {:#?}", + request + ); + }; + content +} + +struct RequestChannels { + predict: mpsc::UnboundedReceiver<(open_ai::Request, oneshot::Sender)>, + reject: mpsc::UnboundedReceiver<(RejectEditPredictionsBody, oneshot::Sender<()>)>, +} + +fn init_test_with_fake_client( + cx: &mut TestAppContext, +) -> (Entity, RequestChannels) { + cx.update(move |cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + zlog::init_test(); + + let (predict_req_tx, predict_req_rx) = mpsc::unbounded(); + let (reject_req_tx, reject_req_rx) = mpsc::unbounded(); + + let http_client = FakeHttpClient::create({ + move |req| { + let uri = req.uri().path().to_string(); + let mut body = req.into_body(); + let predict_req_tx = predict_req_tx.clone(); + let reject_req_tx = reject_req_tx.clone(); + async move { + let resp = match uri.as_str() { + "/client/llm_tokens" => serde_json::to_string(&json!({ + "token": "test" + })) + .unwrap(), + "/predict_edits/raw" => { + let mut buf = Vec::new(); + body.read_to_end(&mut buf).await.ok(); + let req = serde_json::from_slice(&buf).unwrap(); + + let (res_tx, res_rx) = oneshot::channel(); + predict_req_tx.unbounded_send((req, res_tx)).unwrap(); + serde_json::to_string(&res_rx.await?).unwrap() + } + "/predict_edits/reject" => { + let mut buf = Vec::new(); + body.read_to_end(&mut buf).await.ok(); + let req = serde_json::from_slice(&buf).unwrap(); + + let (res_tx, res_rx) = oneshot::channel(); + reject_req_tx.unbounded_send((req, res_tx)).unwrap(); + serde_json::to_string(&res_rx.await?).unwrap() + } + _ => { + panic!("Unexpected path: {}", uri) + } + }; + + Ok(Response::builder().body(resp.into()).unwrap()) + } + } + }); + + let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); + client.cloud_client().set_credentials(1, "test".into()); + + language_model::init(client.clone(), cx); + + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + let ep_store = EditPredictionStore::global(&client, &user_store, cx); + + ( + ep_store, + RequestChannels { + predict: predict_req_rx, + reject: reject_req_rx, + }, + ) + }) +} + +const BSD_0_TXT: &str = 
include_str!("../license_examples/0bsd.txt"); + +#[gpui::test] +async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { + let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx)); + let edits: Arc<[(Range, Arc)]> = cx.update(|cx| { + to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into() + }); + + let edit_preview = cx + .read(|cx| buffer.read(cx).preview_edits(edits.clone(), cx)) + .await; + + let prediction = EditPrediction { + edits, + edit_preview, + buffer: buffer.clone(), + snapshot: cx.read(|cx| buffer.read(cx).snapshot()), + id: EditPredictionId("the-id".into()), + inputs: ZetaPromptInput { + events: Default::default(), + related_files: Default::default(), + cursor_path: Path::new("").into(), + cursor_excerpt: "".into(), + editable_range_in_excerpt: 0..0, + cursor_offset_in_excerpt: 0, + }, + buffer_snapshotted_at: Instant::now(), + response_received_at: Instant::now(), + }; + + cx.update(|cx| { + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..5, "REM".into()), (9..11, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..2, "REM".into()), (6..8, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.undo(cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(2..5, "REM".into()), (9..11, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(3..3, "EM".into()), (7..9, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".into()), (8..10, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(9..11, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".into()), (8..10, "".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx)); + assert_eq!( + from_completion_edits( + &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(), + &buffer, + cx + ), + vec![(4..4, "M".into())] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx)); + assert_eq!(prediction.interpolate(&buffer.read(cx).snapshot()), None); + }) +} + +#[gpui::test] +async fn test_clean_up_diff(cx: &mut TestAppContext) { + init_test(cx); + + assert_eq!( + apply_edit_prediction( + indoc! {" + fn main() { + let word_1 = \"lorem\"; + let range = word.len()..word.len(); + } + "}, + indoc! {" + <|editable_region_start|> + fn main() { + let word_1 = \"lorem\"; + let range = word_1.len()..word_1.len(); + } + + <|editable_region_end|> + "}, + cx, + ) + .await, + indoc! 
{" + fn main() { + let word_1 = \"lorem\"; + let range = word_1.len()..word_1.len(); + } + "}, + ); + + assert_eq!( + apply_edit_prediction( + indoc! {" + fn main() { + let story = \"the quick\" + } + "}, + indoc! {" + <|editable_region_start|> + fn main() { + let story = \"the quick brown fox jumps over the lazy dog\"; + } + + <|editable_region_end|> + "}, + cx, + ) + .await, + indoc! {" + fn main() { + let story = \"the quick brown fox jumps over the lazy dog\"; + } + "}, + ); +} + +#[gpui::test] +async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let buffer_content = "lorem\n"; + let completion_response = indoc! {" + ```animals.js + <|start_of_file|> + <|editable_region_start|> + lorem + ipsum + <|editable_region_end|> + ```"}; + + assert_eq!( + apply_edit_prediction(buffer_content, completion_response, cx).await, + "lorem\nipsum" + ); +} + +#[gpui::test] +async fn test_can_collect_data(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree(path!("/project"), json!({ "LICENSE": BSD_0_TXT })) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/src/main.rs"), cx) + }) + .await + .unwrap(); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Disabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_no_data_collection_for_remote_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let buffer = cx.new(|_cx| { + Buffer::remote( + language::BufferId::new(1).unwrap(), + ReplicaId::new(1), + language::Capability::ReadWrite, + "fn main() {\n println!(\"Hello\");\n}", + ) + }); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_no_data_collection_for_private_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "LICENSE": BSD_0_TXT, + ".env": "SECRET_KEY=secret" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/project/.env", cx) + }) + .await + .unwrap(); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + 
captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_no_data_collection_for_untitled_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + let buffer = cx.new(|cx| Buffer::local("", cx)); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_no_data_collection_when_closed_source(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree(path!("/project"), json!({ "main.rs": "fn main() {}" })) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/project/main.rs", cx) + }) + .await + .unwrap(); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_data_collection_status_changes_on_move(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/open_source_worktree"), + json!({ "LICENSE": BSD_0_TXT, "main.rs": "" }), + ) + .await; + fs.insert_tree(path!("/closed_source_worktree"), json!({ "main.rs": "" })) + .await; + + let project = Project::test( + fs.clone(), + [ + path!("/open_source_worktree").as_ref(), + path!("/closed_source_worktree").as_ref(), + ], + cx, + ) + .await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/open_source_worktree/main.rs"), cx) + }) + .await + .unwrap(); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + let closed_source_file = project + .update(cx, |project, cx| { + let worktree2 = project + .worktree_for_root_name("closed_source_worktree", cx) + .unwrap(); + worktree2.update(cx, |worktree2, cx| { + worktree2.load_file(rel_path("main.rs"), cx) + }) + }) + .await + .unwrap() + .file; + + buffer.update(cx, |buffer, cx| { + buffer.file_updated(closed_source_file, cx); + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); +} + +#[gpui::test] +async fn test_no_data_collection_for_events_in_uncollectable_buffers(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/worktree1"), + json!({ "LICENSE": BSD_0_TXT, "main.rs": "", "other.rs": "" }), + ) + .await; + fs.insert_tree(path!("/worktree2"), json!({ "private.rs": "" })) + .await; + + let project = Project::test( + fs.clone(), + [path!("/worktree1").as_ref(), 
path!("/worktree2").as_ref()], + cx, + ) + .await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/worktree1/main.rs"), cx) + }) + .await + .unwrap(); + let private_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/worktree2/file.rs"), cx) + }) + .await + .unwrap(); + + let (ep_store, captured_request, _) = make_test_ep_store(&project, cx).await; + ep_store.update(cx, |ep_store, _cx| { + ep_store.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + // this has a side effect of registering the buffer to watch for edits + run_edit_prediction(&private_buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + + private_buffer.update(cx, |private_buffer, cx| { + private_buffer.edit([(0..0, "An edit for the history!")], None, cx); + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + + // make an edit that uses too many bytes, causing private_buffer edit to not be able to be + // included + buffer.update(cx, |buffer, cx| { + buffer.edit( + [( + 0..0, + " ".repeat(MAX_EVENT_TOKENS * cursor_excerpt::BYTES_PER_TOKEN_GUESS), + )], + None, + cx, + ); + }); + + run_edit_prediction(&buffer, &project, &ep_store, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); +} + +fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); +} + +async fn apply_edit_prediction( + buffer_content: &str, + completion_response: &str, + cx: &mut TestAppContext, +) -> String { + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); + let (ep_store, _, response) = make_test_ep_store(&project, cx).await; + *response.lock() = completion_response.to_string(); + let edit_prediction = run_edit_prediction(&buffer, &project, &ep_store, cx).await; + buffer.update(cx, |buffer, cx| { + buffer.edit(edit_prediction.edits.iter().cloned(), None, cx) + }); + buffer.read_with(cx, |buffer, _| buffer.text()) +} + +async fn run_edit_prediction( + buffer: &Entity, + project: &Entity, + ep_store: &Entity, + cx: &mut TestAppContext, +) -> EditPrediction { + let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(buffer, &project, cx) + }); + cx.background_executor.run_until_parked(); + let prediction_task = ep_store.update(cx, |ep_store, cx| { + ep_store.request_prediction(&project, buffer, cursor, Default::default(), cx) + }); + prediction_task.await.unwrap().unwrap().prediction.unwrap() +} + +async fn make_test_ep_store( + project: &Entity, + cx: &mut TestAppContext, +) -> ( + Entity, + Arc>>, + Arc>, +) { + let default_response = indoc! 
{" + ```main.rs + <|start_of_file|> + <|editable_region_start|> + hello world + <|editable_region_end|> + ```" + }; + let captured_request: Arc>> = Arc::new(Mutex::new(None)); + let completion_response: Arc> = + Arc::new(Mutex::new(default_response.to_string())); + let http_client = FakeHttpClient::create({ + let captured_request = captured_request.clone(); + let completion_response = completion_response.clone(); + let mut next_request_id = 0; + move |req| { + let captured_request = captured_request.clone(); + let completion_response = completion_response.clone(); + async move { + match (req.method(), req.uri().path()) { + (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() + .status(200) + .body( + serde_json::to_string(&CreateLlmTokenResponse { + token: LlmToken("the-llm-token".to_string()), + }) + .unwrap() + .into(), + ) + .unwrap()), + (&Method::POST, "/predict_edits/v2") => { + let mut request_body = String::new(); + req.into_body().read_to_string(&mut request_body).await?; + *captured_request.lock() = + Some(serde_json::from_str(&request_body).unwrap()); + next_request_id += 1; + Ok(http_client::Response::builder() + .status(200) + .body( + serde_json::to_string(&PredictEditsResponse { + request_id: format!("request-{next_request_id}"), + output_excerpt: completion_response.lock().clone(), + }) + .unwrap() + .into(), + ) + .unwrap()) + } + _ => Ok(http_client::Response::builder() + .status(404) + .body("Not Found".into()) + .unwrap()), + } + } + } + }); + + let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + cx.update(|cx| { + RefreshLlmTokenListener::register(client.clone(), cx); + }); + let _server = FakeServer::for_client(42, &client, cx).await; + + let ep_store = cx.new(|cx| { + let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + + let worktrees = project.read(cx).worktrees(cx).collect::>(); + for worktree in worktrees { + let worktree_id = worktree.read(cx).id(); + ep_store + .get_or_init_project(project, cx) + .license_detection_watchers + .entry(worktree_id) + .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx))); + } + + ep_store + }); + + (ep_store, captured_request, completion_response) +} + +fn to_completion_edits( + iterator: impl IntoIterator, Arc)>, + buffer: &Entity, + cx: &App, +) -> Vec<(Range, Arc)> { + let buffer = buffer.read(cx); + iterator + .into_iter() + .map(|(range, text)| { + ( + buffer.anchor_after(range.start)..buffer.anchor_before(range.end), + text, + ) + }) + .collect() +} + +fn from_completion_edits( + editor_edits: &[(Range, Arc)], + buffer: &Entity, + cx: &App, +) -> Vec<(Range, Arc)> { + let buffer = buffer.read(cx); + editor_edits + .iter() + .map(|(range, text)| { + ( + range.start.to_offset(buffer)..range.end.to_offset(buffer), + text.clone(), + ) + }) + .collect() +} + +#[gpui::test] +async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + serde_json::json!({ + "main.rs": "fn main() {\n \n}\n" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + + let http_client = FakeHttpClient::create(|_req| async move { + Ok(gpui::http_client::Response::builder() + .status(401) + .body("Unauthorized".into()) + .unwrap()) + }); + + let client = + cx.update(|cx| 
client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + cx.update(|cx| { + language_model::RefreshLlmTokenListener::register(client.clone(), cx); + }); + + let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx)); + + let buffer = project + .update(cx, |project, cx| { + let path = project + .find_project_path(path!("/project/main.rs"), cx) + .unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 4))); + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx) + }); + cx.background_executor.run_until_parked(); + + let completion_task = ep_store.update(cx, |ep_store, cx| { + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx) + }); + + let result = completion_task.await; + assert!( + result.is_err(), + "Without authentication and without custom URL, prediction should fail" + ); +} + +#[gpui::test] +async fn test_unauthenticated_with_custom_url_allows_prediction_impl(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + serde_json::json!({ + "main.rs": "fn main() {\n \n}\n" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + + let predict_called = Arc::new(std::sync::atomic::AtomicBool::new(false)); + let predict_called_clone = predict_called.clone(); + + let http_client = FakeHttpClient::create({ + move |req| { + let uri = req.uri().path().to_string(); + let predict_called = predict_called_clone.clone(); + async move { + if uri.contains("predict") { + predict_called.store(true, std::sync::atomic::Ordering::SeqCst); + Ok(gpui::http_client::Response::builder() + .body( + serde_json::to_string(&open_ai::Response { + id: "test-123".to_string(), + object: "chat.completion".to_string(), + created: 0, + model: "test".to_string(), + usage: open_ai::Usage { + prompt_tokens: 0, + completion_tokens: 0, + total_tokens: 0, + }, + choices: vec![open_ai::Choice { + index: 0, + message: open_ai::RequestMessage::Assistant { + content: Some(open_ai::MessageContent::Plain( + indoc! 
{" + ```main.rs + <|start_of_file|> + <|editable_region_start|> + fn main() { + println!(\"Hello, world!\"); + } + <|editable_region_end|> + ``` + "} + .to_string(), + )), + tool_calls: vec![], + }, + finish_reason: Some("stop".to_string()), + }], + }) + .unwrap() + .into(), + ) + .unwrap()) + } else { + Ok(gpui::http_client::Response::builder() + .status(401) + .body("Unauthorized".into()) + .unwrap()) + } + } + } + }); + + let client = + cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + cx.update(|cx| { + language_model::RefreshLlmTokenListener::register(client.clone(), cx); + }); + + let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx)); + + let buffer = project + .update(cx, |project, cx| { + let path = project + .find_project_path(path!("/project/main.rs"), cx) + .unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 4))); + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx) + }); + cx.background_executor.run_until_parked(); + + let completion_task = ep_store.update(cx, |ep_store, cx| { + ep_store.set_custom_predict_edits_url(Url::parse("http://test/predict").unwrap()); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx) + }); + + let _ = completion_task.await; + + assert!( + predict_called.load(std::sync::atomic::Ordering::SeqCst), + "With custom URL, predict endpoint should be called even without authentication" + ); +} + +#[ctor::ctor] +fn init_logger() { + zlog::init_test(); +} diff --git a/crates/edit_prediction/src/example_spec.rs b/crates/edit_prediction/src/example_spec.rs new file mode 100644 index 0000000000000000000000000000000000000000..bf221b576b890f1200c4ee3c095f73edaea71462 --- /dev/null +++ b/crates/edit_prediction/src/example_spec.rs @@ -0,0 +1,212 @@ +use serde::{Deserialize, Serialize}; +use std::{fmt::Write as _, mem, path::Path, sync::Arc}; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExampleSpec { + #[serde(default)] + pub name: String, + pub repository_url: String, + pub revision: String, + #[serde(default)] + pub uncommitted_diff: String, + pub cursor_path: Arc, + pub cursor_position: String, + pub edit_history: String, + pub expected_patch: String, +} + +const UNCOMMITTED_DIFF_HEADING: &str = "Uncommitted Diff"; +const EDIT_HISTORY_HEADING: &str = "Edit History"; +const CURSOR_POSITION_HEADING: &str = "Cursor Position"; +const EXPECTED_PATCH_HEADING: &str = "Expected Patch"; +const EXPECTED_CONTEXT_HEADING: &str = "Expected Context"; +const REPOSITORY_URL_FIELD: &str = "repository_url"; +const REVISION_FIELD: &str = "revision"; + +impl ExampleSpec { + /// Format this example spec as markdown. 
+ pub fn to_markdown(&self) -> String { + let mut markdown = String::new(); + + _ = writeln!(markdown, "# {}", self.name); + markdown.push('\n'); + + _ = writeln!(markdown, "repository_url = {}", self.repository_url); + _ = writeln!(markdown, "revision = {}", self.revision); + markdown.push('\n'); + + if !self.uncommitted_diff.is_empty() { + _ = writeln!(markdown, "## {}", UNCOMMITTED_DIFF_HEADING); + _ = writeln!(markdown); + _ = writeln!(markdown, "```diff"); + markdown.push_str(&self.uncommitted_diff); + if !markdown.ends_with('\n') { + markdown.push('\n'); + } + _ = writeln!(markdown, "```"); + markdown.push('\n'); + } + + _ = writeln!(markdown, "## {}", EDIT_HISTORY_HEADING); + _ = writeln!(markdown); + + if self.edit_history.is_empty() { + _ = writeln!(markdown, "(No edit history)"); + _ = writeln!(markdown); + } else { + _ = writeln!(markdown, "```diff"); + markdown.push_str(&self.edit_history); + if !markdown.ends_with('\n') { + markdown.push('\n'); + } + _ = writeln!(markdown, "```"); + markdown.push('\n'); + } + + _ = writeln!(markdown, "## {}", CURSOR_POSITION_HEADING); + _ = writeln!(markdown); + _ = writeln!(markdown, "```{}", self.cursor_path.to_string_lossy()); + markdown.push_str(&self.cursor_position); + if !markdown.ends_with('\n') { + markdown.push('\n'); + } + _ = writeln!(markdown, "```"); + markdown.push('\n'); + + _ = writeln!(markdown, "## {}", EXPECTED_PATCH_HEADING); + markdown.push('\n'); + _ = writeln!(markdown, "```diff"); + markdown.push_str(&self.expected_patch); + if !markdown.ends_with('\n') { + markdown.push('\n'); + } + _ = writeln!(markdown, "```"); + markdown.push('\n'); + + markdown + } + + /// Parse an example spec from markdown. + pub fn from_markdown(name: String, input: &str) -> anyhow::Result { + use pulldown_cmark::{CodeBlockKind, CowStr, Event, HeadingLevel, Parser, Tag, TagEnd}; + + let parser = Parser::new(input); + + let mut spec = ExampleSpec { + name, + repository_url: String::new(), + revision: String::new(), + uncommitted_diff: String::new(), + cursor_path: Path::new("").into(), + cursor_position: String::new(), + edit_history: String::new(), + expected_patch: String::new(), + }; + + let mut text = String::new(); + let mut block_info: CowStr = "".into(); + + #[derive(PartialEq)] + enum Section { + Start, + UncommittedDiff, + EditHistory, + CursorPosition, + ExpectedExcerpts, + ExpectedPatch, + Other, + } + + let mut current_section = Section::Start; + + for event in parser { + match event { + Event::Text(line) => { + text.push_str(&line); + + if let Section::Start = current_section + && let Some((field, value)) = line.split_once('=') + { + match field.trim() { + REPOSITORY_URL_FIELD => { + spec.repository_url = value.trim().to_string(); + } + REVISION_FIELD => { + spec.revision = value.trim().to_string(); + } + _ => {} + } + } + } + Event::End(TagEnd::Heading(HeadingLevel::H2)) => { + let title = mem::take(&mut text); + current_section = if title.eq_ignore_ascii_case(UNCOMMITTED_DIFF_HEADING) { + Section::UncommittedDiff + } else if title.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) { + Section::EditHistory + } else if title.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) { + Section::CursorPosition + } else if title.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) { + Section::ExpectedPatch + } else if title.eq_ignore_ascii_case(EXPECTED_CONTEXT_HEADING) { + Section::ExpectedExcerpts + } else { + Section::Other + }; + } + Event::End(TagEnd::Heading(HeadingLevel::H3)) => { + mem::take(&mut text); + } + 
Event::End(TagEnd::Heading(HeadingLevel::H4)) => { + mem::take(&mut text); + } + Event::End(TagEnd::Heading(level)) => { + anyhow::bail!("Unexpected heading level: {level}"); + } + Event::Start(Tag::CodeBlock(kind)) => { + match kind { + CodeBlockKind::Fenced(info) => { + block_info = info; + } + CodeBlockKind::Indented => { + anyhow::bail!("Unexpected indented codeblock"); + } + }; + } + Event::Start(_) => { + text.clear(); + block_info = "".into(); + } + Event::End(TagEnd::CodeBlock) => { + let block_info = block_info.trim(); + match current_section { + Section::UncommittedDiff => { + spec.uncommitted_diff = mem::take(&mut text); + } + Section::EditHistory => { + spec.edit_history.push_str(&mem::take(&mut text)); + } + Section::CursorPosition => { + spec.cursor_path = Path::new(block_info).into(); + spec.cursor_position = mem::take(&mut text); + } + Section::ExpectedExcerpts => { + mem::take(&mut text); + } + Section::ExpectedPatch => { + spec.expected_patch = mem::take(&mut text); + } + Section::Start | Section::Other => {} + } + } + _ => {} + } + } + + if spec.cursor_path.as_ref() == Path::new("") || spec.cursor_position.is_empty() { + anyhow::bail!("Missing cursor position codeblock"); + } + + Ok(spec) + } +} diff --git a/crates/zeta/src/license_detection.rs b/crates/edit_prediction/src/license_detection.rs similarity index 99% rename from crates/zeta/src/license_detection.rs rename to crates/edit_prediction/src/license_detection.rs index d4d4825615f19e5e5654f7bd78439d9eaa39e4c1..3ad34e7e6df6233cd4ff7462681d7b3588d36534 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/edit_prediction/src/license_detection.rs @@ -735,6 +735,7 @@ mod tests { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -758,6 +759,7 @@ mod tests { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -816,6 +818,7 @@ mod tests { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs new file mode 100644 index 0000000000000000000000000000000000000000..b47bd2ad0374eba33e7b8db726c2fa13c0519465 --- /dev/null +++ b/crates/edit_prediction/src/mercury.rs @@ -0,0 +1,317 @@ +use crate::{ + DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, EditPredictionModelInput, + EditPredictionStartedDebugEvent, open_ai_response::text_from_response, + prediction::EditPredictionResult, +}; +use anyhow::{Context as _, Result}; +use futures::AsyncReadExt as _; +use gpui::{ + App, AppContext as _, Entity, SharedString, Task, + http_client::{self, AsyncBody, Method}, +}; +use language::{OffsetRangeExt as _, ToOffset, ToPoint as _}; +use language_model::{ApiKeyState, EnvVar, env_var}; +use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant}; +use zeta_prompt::ZetaPromptInput; + +const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions"; +const MAX_CONTEXT_TOKENS: usize = 150; +const MAX_REWRITE_TOKENS: usize = 350; + +pub struct Mercury { + pub api_token: Entity, +} + +impl Mercury { + pub fn new(cx: &mut App) -> Self { + Mercury { + api_token: mercury_api_token(cx), + } + } + + pub(crate) fn request_prediction( + &self, + EditPredictionModelInput { + buffer, + snapshot, + position, + events, + related_files, + debug_tx, + .. 
+ }: EditPredictionModelInput, + cx: &mut App, + ) -> Task>> { + self.api_token.update(cx, |key_state, cx| { + _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx); + }); + let Some(api_token) = self.api_token.read(cx).key(&MERCURY_CREDENTIALS_URL) else { + return Task::ready(Ok(None)); + }; + let full_path: Arc = snapshot + .file() + .map(|file| file.full_path(cx)) + .unwrap_or_else(|| "untitled".into()) + .into(); + + let http_client = cx.http_client(); + let cursor_point = position.to_point(&snapshot); + let buffer_snapshotted_at = Instant::now(); + let active_buffer = buffer.clone(); + + let result = cx.background_spawn(async move { + let (editable_range, context_range) = + crate::cursor_excerpt::editable_and_context_ranges_for_cursor_position( + cursor_point, + &snapshot, + MAX_CONTEXT_TOKENS, + MAX_REWRITE_TOKENS, + ); + + let context_offset_range = context_range.to_offset(&snapshot); + + let editable_offset_range = editable_range.to_offset(&snapshot); + + let inputs = zeta_prompt::ZetaPromptInput { + events, + related_files, + cursor_offset_in_excerpt: cursor_point.to_offset(&snapshot) + - context_range.start.to_offset(&snapshot), + cursor_path: full_path.clone(), + cursor_excerpt: snapshot + .text_for_range(context_range) + .collect::() + .into(), + editable_range_in_excerpt: (editable_offset_range.start + - context_offset_range.start) + ..(editable_offset_range.end - context_offset_range.start), + }; + + let prompt = build_prompt(&inputs); + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionStarted( + EditPredictionStartedDebugEvent { + buffer: active_buffer.downgrade(), + prompt: Some(prompt.clone()), + position, + }, + )) + .ok(); + } + + let request_body = open_ai::Request { + model: "mercury-coder".into(), + messages: vec![open_ai::RequestMessage::User { + content: open_ai::MessageContent::Plain(prompt), + }], + stream: false, + max_completion_tokens: None, + stop: vec![], + temperature: None, + tool_choice: None, + parallel_tool_calls: None, + tools: vec![], + prompt_cache_key: None, + reasoning_effort: None, + }; + + let buf = serde_json::to_vec(&request_body)?; + let body: AsyncBody = buf.into(); + + let request = http_client::Request::builder() + .uri(MERCURY_API_URL) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_token)) + .header("Connection", "keep-alive") + .method(Method::POST) + .body(body) + .context("Failed to create request")?; + + let mut response = http_client + .send(request) + .await + .context("Failed to send request")?; + + let mut body: Vec = Vec::new(); + response + .body_mut() + .read_to_end(&mut body) + .await + .context("Failed to read response body")?; + + let response_received_at = Instant::now(); + if !response.status().is_success() { + anyhow::bail!( + "Request failed with status: {:?}\nBody: {}", + response.status(), + String::from_utf8_lossy(&body), + ); + }; + + let mut response: open_ai::Response = + serde_json::from_slice(&body).context("Failed to parse response")?; + + let id = mem::take(&mut response.id); + let response_str = text_from_response(response).unwrap_or_default(); + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionFinished( + EditPredictionFinishedDebugEvent { + buffer: active_buffer.downgrade(), + model_output: Some(response_str.clone()), + position, + }, + )) + .ok(); + } + + let response_str = response_str.strip_prefix("```\n").unwrap_or(&response_str); + let response_str = 
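The model reply may come back wrapped in a bare Markdown fence, which is stripped here before the output is diffed against the editable region. A standalone sketch of that normalization (only a bare fence is handled, matching the code around this point):

```rust
// Sketch of the fence stripping applied to the model reply before diffing.
// Fences with a language tag would pass through unchanged.
fn strip_code_fence(reply: &str) -> &str {
    let reply = reply.strip_prefix("```\n").unwrap_or(reply);
    reply.strip_suffix("\n```").unwrap_or(reply)
}

fn main() {
    assert_eq!(strip_code_fence("```\nfn main() {}\n```"), "fn main() {}");
    assert_eq!(strip_code_fence("None"), "None"); // the "no prediction" sentinel
}
```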
response_str.strip_suffix("\n```").unwrap_or(&response_str); + + let mut edits = Vec::new(); + const NO_PREDICTION_OUTPUT: &str = "None"; + + if response_str != NO_PREDICTION_OUTPUT { + let old_text = snapshot + .text_for_range(editable_offset_range.clone()) + .collect::(); + edits.extend( + language::text_diff(&old_text, &response_str) + .into_iter() + .map(|(range, text)| { + ( + snapshot.anchor_after(editable_offset_range.start + range.start) + ..snapshot + .anchor_before(editable_offset_range.start + range.end), + text, + ) + }), + ); + } + + anyhow::Ok((id, edits, snapshot, response_received_at, inputs)) + }); + + cx.spawn(async move |cx| { + let (id, edits, old_snapshot, response_received_at, inputs) = + result.await.context("Mercury edit prediction failed")?; + anyhow::Ok(Some( + EditPredictionResult::new( + EditPredictionId(id.into()), + &buffer, + &old_snapshot, + edits.into(), + buffer_snapshotted_at, + response_received_at, + inputs, + cx, + ) + .await, + )) + }) + } +} + +fn build_prompt(inputs: &ZetaPromptInput) -> String { + const RECENTLY_VIEWED_SNIPPETS_START: &str = "<|recently_viewed_code_snippets|>\n"; + const RECENTLY_VIEWED_SNIPPETS_END: &str = "<|/recently_viewed_code_snippets|>\n"; + const RECENTLY_VIEWED_SNIPPET_START: &str = "<|recently_viewed_code_snippet|>\n"; + const RECENTLY_VIEWED_SNIPPET_END: &str = "<|/recently_viewed_code_snippet|>\n"; + const CURRENT_FILE_CONTENT_START: &str = "<|current_file_content|>\n"; + const CURRENT_FILE_CONTENT_END: &str = "<|/current_file_content|>\n"; + const CODE_TO_EDIT_START: &str = "<|code_to_edit|>\n"; + const CODE_TO_EDIT_END: &str = "<|/code_to_edit|>\n"; + const EDIT_DIFF_HISTORY_START: &str = "<|edit_diff_history|>\n"; + const EDIT_DIFF_HISTORY_END: &str = "<|/edit_diff_history|>\n"; + const CURSOR_TAG: &str = "<|cursor|>"; + const CODE_SNIPPET_FILE_PATH_PREFIX: &str = "code_snippet_file_path: "; + const CURRENT_FILE_PATH_PREFIX: &str = "current_file_path: "; + + let mut prompt = String::new(); + + push_delimited( + &mut prompt, + RECENTLY_VIEWED_SNIPPETS_START..RECENTLY_VIEWED_SNIPPETS_END, + |prompt| { + for related_file in inputs.related_files.iter() { + for related_excerpt in &related_file.excerpts { + push_delimited( + prompt, + RECENTLY_VIEWED_SNIPPET_START..RECENTLY_VIEWED_SNIPPET_END, + |prompt| { + prompt.push_str(CODE_SNIPPET_FILE_PATH_PREFIX); + prompt.push_str(related_file.path.to_string_lossy().as_ref()); + prompt.push('\n'); + prompt.push_str(&related_excerpt.text.to_string()); + }, + ); + } + } + }, + ); + + push_delimited( + &mut prompt, + CURRENT_FILE_CONTENT_START..CURRENT_FILE_CONTENT_END, + |prompt| { + prompt.push_str(CURRENT_FILE_PATH_PREFIX); + prompt.push_str(inputs.cursor_path.as_os_str().to_string_lossy().as_ref()); + prompt.push('\n'); + + prompt.push_str(&inputs.cursor_excerpt[0..inputs.editable_range_in_excerpt.start]); + push_delimited(prompt, CODE_TO_EDIT_START..CODE_TO_EDIT_END, |prompt| { + prompt.push_str( + &inputs.cursor_excerpt + [inputs.editable_range_in_excerpt.start..inputs.cursor_offset_in_excerpt], + ); + prompt.push_str(CURSOR_TAG); + prompt.push_str( + &inputs.cursor_excerpt + [inputs.cursor_offset_in_excerpt..inputs.editable_range_in_excerpt.end], + ); + }); + prompt.push_str(&inputs.cursor_excerpt[inputs.editable_range_in_excerpt.end..]); + }, + ); + + push_delimited( + &mut prompt, + EDIT_DIFF_HISTORY_START..EDIT_DIFF_HISTORY_END, + |prompt| { + for event in inputs.events.iter() { + zeta_prompt::write_event(prompt, &event); + } + }, + ); + + prompt +} + +fn 
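The `push_delimited` helper used throughout `build_prompt` keeps each prompt section's start and end tags balanced by construction: a `Range<&str>` carries the opening and closing tag, and a callback fills in the body. Restated here with a usage example so it runs standalone (tag name taken from the constants above):

```rust
// Usage sketch for the delimiter-wrapping helper: tags can never be left
// unbalanced because the closing tag is pushed unconditionally after the body.
use std::ops::Range;

fn push_delimited(prompt: &mut String, delimiters: Range<&str>, cb: impl FnOnce(&mut String)) {
    prompt.push_str(delimiters.start);
    cb(prompt);
    prompt.push_str(delimiters.end);
}

fn main() {
    let mut prompt = String::new();
    push_delimited(
        &mut prompt,
        "<|code_to_edit|>\n".."<|/code_to_edit|>\n",
        |prompt| prompt.push_str("let x = 1;\n"),
    );
    assert_eq!(prompt, "<|code_to_edit|>\nlet x = 1;\n<|/code_to_edit|>\n");
}
```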
push_delimited(prompt: &mut String, delimiters: Range<&str>, cb: impl FnOnce(&mut String)) { + prompt.push_str(delimiters.start); + cb(prompt); + prompt.push_str(delimiters.end); +} + +pub const MERCURY_CREDENTIALS_URL: SharedString = + SharedString::new_static("https://api.inceptionlabs.ai/v1/edit/completions"); +pub const MERCURY_CREDENTIALS_USERNAME: &str = "mercury-api-token"; +pub static MERCURY_TOKEN_ENV_VAR: std::sync::LazyLock = env_var!("MERCURY_AI_TOKEN"); +pub static MERCURY_API_KEY: std::sync::OnceLock> = std::sync::OnceLock::new(); + +pub fn mercury_api_token(cx: &mut App) -> Entity { + MERCURY_API_KEY + .get_or_init(|| { + cx.new(|_| ApiKeyState::new(MERCURY_CREDENTIALS_URL, MERCURY_TOKEN_ENV_VAR.clone())) + }) + .clone() +} + +pub fn load_mercury_api_token(cx: &mut App) -> Task> { + mercury_api_token(cx).update(cx, |key_state, cx| { + key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx) + }) +} diff --git a/crates/zeta/src/onboarding_modal.rs b/crates/edit_prediction/src/onboarding_modal.rs similarity index 93% rename from crates/zeta/src/onboarding_modal.rs rename to crates/edit_prediction/src/onboarding_modal.rs index 94480add3053bece5017cf478e9f74065491639b..97f529ae38df350ef21ffc04b79df6e8e6a7a501 100644 --- a/crates/zeta/src/onboarding_modal.rs +++ b/crates/edit_prediction/src/onboarding_modal.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::{ZedPredictUpsell, onboarding_event}; +use crate::ZedPredictUpsell; use ai_onboarding::EditPredictionOnboarding; use client::{Client, UserStore}; use db::kvp::Dismissable; @@ -14,6 +14,16 @@ use settings::update_settings_file; use ui::{Vector, VectorName, prelude::*}; use workspace::{ModalView, Workspace}; +#[macro_export] +macro_rules! onboarding_event { + ($name:expr) => { + telemetry::event!($name, source = "Edit Prediction Onboarding"); + }; + ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => { + telemetry::event!($name, source = "Edit Prediction Onboarding", $($key $(= $value)?),+); + }; +} + /// Introduces user to Zed's Edit Prediction feature pub struct ZedPredictModal { onboarding: Entity, @@ -121,8 +131,8 @@ impl Render for ZedPredictModal { onboarding_event!("Cancelled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child( div() diff --git a/crates/edit_prediction/src/open_ai_response.rs b/crates/edit_prediction/src/open_ai_response.rs new file mode 100644 index 0000000000000000000000000000000000000000..c7e3350936dd89c89849130ba279ad2914dd2bd8 --- /dev/null +++ b/crates/edit_prediction/src/open_ai_response.rs @@ -0,0 +1,31 @@ +pub fn text_from_response(mut res: open_ai::Response) -> Option { + let choice = res.choices.pop()?; + let output_text = match choice.message { + open_ai::RequestMessage::Assistant { + content: Some(open_ai::MessageContent::Plain(content)), + .. + } => content, + open_ai::RequestMessage::Assistant { + content: Some(open_ai::MessageContent::Multipart(mut content)), + .. + } => { + if content.is_empty() { + log::error!("No output from Baseten completion response"); + return None; + } + + match content.remove(0) { + open_ai::MessagePart::Text { text } => text, + open_ai::MessagePart::Image { .. 
} => { + log::error!("Expected text, got an image"); + return None; + } + } + } + _ => { + log::error!("Invalid response message: {:?}", choice.message); + return None; + } + }; + Some(output_text) +} diff --git a/crates/zeta2/src/prediction.rs b/crates/edit_prediction/src/prediction.rs similarity index 73% rename from crates/zeta2/src/prediction.rs rename to crates/edit_prediction/src/prediction.rs index e9f726ce00c36b5235919c0e185876996f4fda03..c63640ccd0e1815b32f736e8a0fee8d75d124df1 100644 --- a/crates/zeta2/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -1,7 +1,14 @@ -use std::{ops::Range, sync::Arc}; - +use std::{ + ops::Range, + sync::Arc, + time::{Duration, Instant}, +}; + +use cloud_llm_client::EditPredictionRejectReason; +use edit_prediction_types::interpolate_edits; use gpui::{AsyncApp, Entity, SharedString}; -use language::{Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, TextBufferSnapshot}; +use language::{Anchor, Buffer, BufferSnapshot, EditPreview, TextBufferSnapshot}; +use zeta_prompt::ZetaPromptInput; #[derive(Clone, Default, Debug, PartialEq, Eq, Hash)] pub struct EditPredictionId(pub SharedString); @@ -18,55 +25,92 @@ impl std::fmt::Display for EditPredictionId { } } -#[derive(Clone)] -pub struct EditPrediction { +/// A prediction response that was returned from the provider, whether it was ultimately valid or not. +pub struct EditPredictionResult { pub id: EditPredictionId, - pub edits: Arc<[(Range, Arc)]>, - pub snapshot: BufferSnapshot, - pub edit_preview: EditPreview, - // We keep a reference to the buffer so that we do not need to reload it from disk when applying the prediction. - pub buffer: Entity, + pub prediction: Result, } -impl EditPrediction { +impl EditPredictionResult { pub async fn new( id: EditPredictionId, edited_buffer: &Entity, edited_buffer_snapshot: &BufferSnapshot, - edits: Vec<(Range, Arc)>, + edits: Arc<[(Range, Arc)]>, + buffer_snapshotted_at: Instant, + response_received_at: Instant, + inputs: ZetaPromptInput, cx: &mut AsyncApp, - ) -> Option { - let (edits, snapshot, edit_preview_task) = edited_buffer + ) -> Self { + if edits.is_empty() { + return Self { + id, + prediction: Err(EditPredictionRejectReason::Empty), + }; + } + + let Some((edits, snapshot, edit_preview_task)) = edited_buffer .read_with(cx, |buffer, cx| { let new_snapshot = buffer.snapshot(); let edits: Arc<[_]> = - interpolate_edits(&edited_buffer_snapshot, &new_snapshot, edits.into())?.into(); + interpolate_edits(&edited_buffer_snapshot, &new_snapshot, &edits)?.into(); Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx))) }) - .ok()??; + .ok() + .flatten() + else { + return Self { + id, + prediction: Err(EditPredictionRejectReason::InterpolatedEmpty), + }; + }; let edit_preview = edit_preview_task.await; - Some(EditPrediction { - id, - edits, - snapshot, - edit_preview, - buffer: edited_buffer.clone(), - }) + Self { + id: id.clone(), + prediction: Ok(EditPrediction { + id, + edits, + snapshot, + edit_preview, + inputs, + buffer: edited_buffer.clone(), + buffer_snapshotted_at, + response_received_at, + }), + } } +} +#[derive(Clone)] +pub struct EditPrediction { + pub id: EditPredictionId, + pub edits: Arc<[(Range, Arc)]>, + pub snapshot: BufferSnapshot, + pub edit_preview: EditPreview, + pub buffer: Entity, + pub buffer_snapshotted_at: Instant, + pub response_received_at: Instant, + pub inputs: zeta_prompt::ZetaPromptInput, +} + +impl EditPrediction { pub fn interpolate( &self, new_snapshot: &TextBufferSnapshot, ) -> Option, Arc)>> 
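Interpolation, implemented by `interpolate_edits` (now imported from `edit_prediction_types`), is what keeps a prediction usable while the user keeps typing: if what they typed at an edit site is a prefix of the pending insertion, only the remaining suffix is kept; if it diverges, the prediction no longer applies. A simplified, string-level sketch of that rule, leaving out the buffer anchors and multi-edit bookkeeping of the real implementation:

```rust
// Simplified sketch of the interpolation rule for a single predicted insertion.
fn interpolate(predicted: &str, user_typed: &str) -> Option<String> {
    let remaining = predicted.strip_prefix(user_typed)?;
    if remaining.is_empty() {
        // The user typed the whole prediction; nothing left to suggest.
        None
    } else {
        Some(remaining.to_string())
    }
}

fn main() {
    assert_eq!(interpolate("println!(\"hi\");", "print"), Some("ln!(\"hi\");".to_string()));
    assert_eq!(interpolate("println!(\"hi\");", "dbg!"), None);
    assert_eq!(interpolate("println!(\"hi\");", "println!(\"hi\");"), None);
}
```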
{ - interpolate_edits(&self.snapshot, new_snapshot, self.edits.clone()) + interpolate_edits(&self.snapshot, new_snapshot, &self.edits) } pub fn targets_buffer(&self, buffer: &Buffer) -> bool { self.snapshot.remote_id() == buffer.remote_id() } + + pub fn latency(&self) -> Duration { + self.response_received_at - self.buffer_snapshotted_at + } } impl std::fmt::Debug for EditPrediction { @@ -78,57 +122,14 @@ impl std::fmt::Debug for EditPrediction { } } -pub fn interpolate_edits( - old_snapshot: &TextBufferSnapshot, - new_snapshot: &TextBufferSnapshot, - current_edits: Arc<[(Range, Arc)]>, -) -> Option, Arc)>> { - let mut edits = Vec::new(); - - let mut model_edits = current_edits.iter().peekable(); - for user_edit in new_snapshot.edits_since::(&old_snapshot.version) { - while let Some((model_old_range, _)) = model_edits.peek() { - let model_old_range = model_old_range.to_offset(old_snapshot); - if model_old_range.end < user_edit.old.start { - let (model_old_range, model_new_text) = model_edits.next().unwrap(); - edits.push((model_old_range.clone(), model_new_text.clone())); - } else { - break; - } - } - - if let Some((model_old_range, model_new_text)) = model_edits.peek() { - let model_old_offset_range = model_old_range.to_offset(old_snapshot); - if user_edit.old == model_old_offset_range { - let user_new_text = new_snapshot - .text_for_range(user_edit.new.clone()) - .collect::(); - - if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) { - if !model_suffix.is_empty() { - let anchor = old_snapshot.anchor_after(user_edit.old.end); - edits.push((anchor..anchor, model_suffix.into())); - } - - model_edits.next(); - continue; - } - } - } - - return None; - } - - edits.extend(model_edits.cloned()); - - if edits.is_empty() { None } else { Some(edits) } -} - #[cfg(test)] mod tests { + use std::path::Path; + use super::*; use gpui::{App, Entity, TestAppContext, prelude::*}; use language::{Buffer, ToOffset as _}; + use zeta_prompt::ZetaPromptInput; #[gpui::test] async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { @@ -147,6 +148,16 @@ mod tests { snapshot: cx.read(|cx| buffer.read(cx).snapshot()), buffer: buffer.clone(), edit_preview, + inputs: ZetaPromptInput { + events: vec![], + related_files: vec![].into(), + cursor_path: Path::new("path.txt").into(), + cursor_offset_in_excerpt: 0, + cursor_excerpt: "".into(), + editable_range_in_excerpt: 0..0, + }, + buffer_snapshotted_at: Instant::now(), + response_received_at: Instant::now(), }; cx.update(|cx| { diff --git a/crates/edit_prediction/src/sweep_ai.rs b/crates/edit_prediction/src/sweep_ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..2ed24cd8ef728383ec800acbb2ab7c7b99f07c06 --- /dev/null +++ b/crates/edit_prediction/src/sweep_ai.rs @@ -0,0 +1,401 @@ +use anyhow::Result; +use futures::AsyncReadExt as _; +use gpui::{ + App, AppContext as _, Entity, SharedString, Task, + http_client::{self, AsyncBody, Method}, +}; +use language::{Point, ToOffset as _}; +use language_model::{ApiKeyState, EnvVar, env_var}; +use lsp::DiagnosticSeverity; +use serde::{Deserialize, Serialize}; +use std::{ + fmt::{self, Write as _}, + path::Path, + sync::Arc, + time::Instant, +}; + +use crate::{EditPredictionId, EditPredictionModelInput, prediction::EditPredictionResult}; + +const SWEEP_API_URL: &str = "https://autocomplete.sweep.dev/backend/next_edit_autocomplete"; + +pub struct SweepAi { + pub api_token: Entity, + pub debug_info: Arc, +} + +impl SweepAi { + pub fn new(cx: &mut App) -> Self { + SweepAi { + 
api_token: sweep_api_token(cx), + debug_info: debug_info(cx), + } + } + + pub fn request_prediction_with_sweep( + &self, + inputs: EditPredictionModelInput, + cx: &mut App, + ) -> Task>> { + let debug_info = self.debug_info.clone(); + self.api_token.update(cx, |key_state, cx| { + _ = key_state.load_if_needed(SWEEP_CREDENTIALS_URL, |s| s, cx); + }); + let Some(api_token) = self.api_token.read(cx).key(&SWEEP_CREDENTIALS_URL) else { + return Task::ready(Ok(None)); + }; + let full_path: Arc = inputs + .snapshot + .file() + .map(|file| file.full_path(cx)) + .unwrap_or_else(|| "untitled".into()) + .into(); + + let project_file = project::File::from_dyn(inputs.snapshot.file()); + let repo_name = project_file + .map(|file| file.worktree.read(cx).root_name_str()) + .unwrap_or("untitled") + .into(); + let offset = inputs.position.to_offset(&inputs.snapshot); + + let recent_buffers = inputs.recent_paths.iter().cloned(); + let http_client = cx.http_client(); + + let recent_buffer_snapshots = recent_buffers + .filter_map(|project_path| { + let buffer = inputs.project.read(cx).get_open_buffer(&project_path, cx)?; + if inputs.buffer == buffer { + None + } else { + Some(buffer.read(cx).snapshot()) + } + }) + .take(3) + .collect::>(); + + let buffer_snapshotted_at = Instant::now(); + + let result = cx.background_spawn(async move { + let text = inputs.snapshot.text(); + + let mut recent_changes = String::new(); + for event in &inputs.events { + write_event(event.as_ref(), &mut recent_changes).unwrap(); + } + + let mut file_chunks = recent_buffer_snapshots + .into_iter() + .map(|snapshot| { + let end_point = Point::new(30, 0).min(snapshot.max_point()); + FileChunk { + content: snapshot.text_for_range(Point::zero()..end_point).collect(), + file_path: snapshot + .file() + .map(|f| f.path().as_unix_str()) + .unwrap_or("untitled") + .to_string(), + start_line: 0, + end_line: end_point.row as usize, + timestamp: snapshot.file().and_then(|file| { + Some( + file.disk_state() + .mtime()? + .to_seconds_and_nanos_for_persistence()? 
+ .0, + ) + }), + } + }) + .collect::>(); + + let retrieval_chunks = inputs + .related_files + .iter() + .flat_map(|related_file| { + related_file.excerpts.iter().map(|excerpt| FileChunk { + file_path: related_file.path.to_string_lossy().to_string(), + start_line: excerpt.row_range.start as usize, + end_line: excerpt.row_range.end as usize, + content: excerpt.text.to_string(), + timestamp: None, + }) + }) + .collect(); + + let diagnostic_entries = inputs + .snapshot + .diagnostics_in_range(inputs.diagnostic_search_range, false); + let mut diagnostic_content = String::new(); + let mut diagnostic_count = 0; + + for entry in diagnostic_entries { + let start_point: Point = entry.range.start; + + let severity = match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => "error", + DiagnosticSeverity::WARNING => "warning", + DiagnosticSeverity::INFORMATION => "info", + DiagnosticSeverity::HINT => "hint", + _ => continue, + }; + + diagnostic_count += 1; + + writeln!( + &mut diagnostic_content, + "{} at line {}: {}", + severity, + start_point.row + 1, + entry.diagnostic.message + )?; + } + + if !diagnostic_content.is_empty() { + file_chunks.push(FileChunk { + file_path: format!("Diagnostics for {}", full_path.display()), + start_line: 0, + end_line: diagnostic_count, + content: diagnostic_content, + timestamp: None, + }); + } + + let request_body = AutocompleteRequest { + debug_info, + repo_name, + file_path: full_path.clone(), + file_contents: text.clone(), + original_file_contents: text, + cursor_position: offset, + recent_changes: recent_changes.clone(), + changes_above_cursor: true, + multiple_suggestions: false, + branch: None, + file_chunks, + retrieval_chunks, + recent_user_actions: vec![], + use_bytes: true, + // TODO + privacy_mode_enabled: false, + }; + + let mut buf: Vec = Vec::new(); + let writer = brotli::CompressorWriter::new(&mut buf, 4096, 11, 22); + serde_json::to_writer(writer, &request_body)?; + let body: AsyncBody = buf.into(); + + let ep_inputs = zeta_prompt::ZetaPromptInput { + events: inputs.events, + related_files: inputs.related_files.clone(), + cursor_path: full_path.clone(), + cursor_excerpt: request_body.file_contents.into(), + // we actually don't know + editable_range_in_excerpt: 0..inputs.snapshot.len(), + cursor_offset_in_excerpt: request_body.cursor_position, + }; + + let request = http_client::Request::builder() + .uri(SWEEP_API_URL) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_token)) + .header("Connection", "keep-alive") + .header("Content-Encoding", "br") + .method(Method::POST) + .body(body)?; + + let mut response = http_client.send(request).await?; + + let mut body: Vec = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + let response_received_at = Instant::now(); + if !response.status().is_success() { + anyhow::bail!( + "Request failed with status: {:?}\nBody: {}", + response.status(), + String::from_utf8_lossy(&body), + ); + }; + + let response: AutocompleteResponse = serde_json::from_slice(&body)?; + + let old_text = inputs + .snapshot + .text_for_range(response.start_index..response.end_index) + .collect::(); + let edits = language::text_diff(&old_text, &response.completion) + .into_iter() + .map(|(range, text)| { + ( + inputs + .snapshot + .anchor_after(response.start_index + range.start) + ..inputs + .snapshot + .anchor_before(response.start_index + range.end), + text, + ) + }) + .collect::>(); + + anyhow::Ok(( + response.autocomplete_id, + edits, + inputs.snapshot, + 
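Request bodies for this endpoint are Brotli-compressed JSON: the serializer writes straight into a `brotli::CompressorWriter`, and the request carries `Content-Encoding: br`. A standalone sketch of that encoding step (the payload type is illustrative):

```rust
// Sketch of the request-body encoding: serialize JSON directly into a Brotli
// compressor. The (4096, 11, 22) arguments are buffer size, quality, and
// log2 window size, matching the values used above.
use serde::Serialize;

#[derive(Serialize)]
struct Payload<'a> {
    file_path: &'a str,
    cursor_position: usize,
}

fn compress_json<T: Serialize>(value: &T) -> anyhow::Result<Vec<u8>> {
    let mut buf = Vec::new();
    let writer = brotli::CompressorWriter::new(&mut buf, 4096, 11, 22);
    serde_json::to_writer(writer, value)?; // the compressor flushes when dropped here
    Ok(buf)
}

fn main() -> anyhow::Result<()> {
    let body = compress_json(&Payload { file_path: "src/main.rs", cursor_position: 42 })?;
    assert!(!body.is_empty());
    Ok(())
}
```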
response_received_at, + ep_inputs, + )) + }); + + let buffer = inputs.buffer.clone(); + + cx.spawn(async move |cx| { + let (id, edits, old_snapshot, response_received_at, inputs) = result.await?; + anyhow::Ok(Some( + EditPredictionResult::new( + EditPredictionId(id.into()), + &buffer, + &old_snapshot, + edits.into(), + buffer_snapshotted_at, + response_received_at, + inputs, + cx, + ) + .await, + )) + }) + } +} + +pub const SWEEP_CREDENTIALS_URL: SharedString = + SharedString::new_static("https://autocomplete.sweep.dev"); +pub const SWEEP_CREDENTIALS_USERNAME: &str = "sweep-api-token"; +pub static SWEEP_AI_TOKEN_ENV_VAR: std::sync::LazyLock = env_var!("SWEEP_AI_TOKEN"); +pub static SWEEP_API_KEY: std::sync::OnceLock> = std::sync::OnceLock::new(); + +pub fn sweep_api_token(cx: &mut App) -> Entity { + SWEEP_API_KEY + .get_or_init(|| { + cx.new(|_| ApiKeyState::new(SWEEP_CREDENTIALS_URL, SWEEP_AI_TOKEN_ENV_VAR.clone())) + }) + .clone() +} + +pub fn load_sweep_api_token(cx: &mut App) -> Task> { + sweep_api_token(cx).update(cx, |key_state, cx| { + key_state.load_if_needed(SWEEP_CREDENTIALS_URL, |s| s, cx) + }) +} + +#[derive(Debug, Clone, Serialize)] +struct AutocompleteRequest { + pub debug_info: Arc, + pub repo_name: String, + pub branch: Option, + pub file_path: Arc, + pub file_contents: String, + pub recent_changes: String, + pub cursor_position: usize, + pub original_file_contents: String, + pub file_chunks: Vec, + pub retrieval_chunks: Vec, + pub recent_user_actions: Vec, + pub multiple_suggestions: bool, + pub privacy_mode_enabled: bool, + pub changes_above_cursor: bool, + pub use_bytes: bool, +} + +#[derive(Debug, Clone, Serialize)] +struct FileChunk { + pub file_path: String, + pub start_line: usize, + pub end_line: usize, + pub content: String, + pub timestamp: Option, +} + +#[derive(Debug, Clone, Serialize)] +struct UserAction { + pub action_type: ActionType, + pub line_number: usize, + pub offset: usize, + pub file_path: String, + pub timestamp: u64, +} + +#[allow(dead_code)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +enum ActionType { + CursorMovement, + InsertChar, + DeleteChar, + InsertSelection, + DeleteSelection, +} + +#[derive(Debug, Clone, Deserialize)] +struct AutocompleteResponse { + pub autocomplete_id: String, + pub start_index: usize, + pub end_index: usize, + pub completion: String, + #[allow(dead_code)] + pub confidence: f64, + #[allow(dead_code)] + pub logprobs: Option, + #[allow(dead_code)] + pub finish_reason: Option, + #[allow(dead_code)] + pub elapsed_time_ms: u64, + #[allow(dead_code)] + #[serde(default, rename = "completions")] + pub additional_completions: Vec, +} + +#[allow(dead_code)] +#[derive(Debug, Clone, Deserialize)] +struct AdditionalCompletion { + pub start_index: usize, + pub end_index: usize, + pub completion: String, + pub confidence: f64, + pub autocomplete_id: String, + pub logprobs: Option, + pub finish_reason: Option, +} + +fn write_event(event: &zeta_prompt::Event, f: &mut impl fmt::Write) -> fmt::Result { + match event { + zeta_prompt::Event::BufferChange { + old_path, + path, + diff, + .. + } => { + if old_path != path { + // TODO confirm how to do this for sweep + // writeln!(f, "User renamed {:?} to {:?}\n", old_path, new_path)?; + } + + if !diff.is_empty() { + write!(f, "File: {}:\n{}\n", path.display(), diff)? 
+ } + + fmt::Result::Ok(()) + } + } +} + +fn debug_info(cx: &gpui::App) -> Arc { + format!( + "Zed v{version} ({sha}) - OS: {os} - Zed v{version}", + version = release_channel::AppVersion::global(cx), + sha = release_channel::AppCommitSha::try_global(cx) + .map_or("unknown".to_string(), |sha| sha.full()), + os = client::telemetry::os_name(), + ) + .into() +} diff --git a/crates/zeta2/src/udiff.rs b/crates/edit_prediction/src/udiff.rs similarity index 78% rename from crates/zeta2/src/udiff.rs rename to crates/edit_prediction/src/udiff.rs index d765a64345f839b9314632444d209fa79e9ca5ce..78fec03dd78301d56ac6e3f914ba60432e41637d 100644 --- a/crates/zeta2/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -14,87 +14,48 @@ use anyhow::anyhow; use collections::HashMap; use gpui::AsyncApp; use gpui::Entity; -use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot}; -use project::Project; +use language::{Anchor, Buffer, OffsetRangeExt as _, TextBufferSnapshot}; +use project::{Project, ProjectPath}; +use util::paths::PathStyle; +use util::rel_path::RelPath; -pub async fn parse_diff<'a>( - diff_str: &'a str, - get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range])> + Send, -) -> Result<(&'a BufferSnapshot, Vec<(Range, Arc)>)> { - let mut diff = DiffParser::new(diff_str); - let mut edited_buffer = None; - let mut edits = Vec::new(); - - while let Some(event) = diff.next()? { - match event { - DiffEvent::Hunk { - path: file_path, - hunk, - } => { - let (buffer, ranges) = match edited_buffer { - None => { - edited_buffer = get_buffer(&Path::new(file_path.as_ref())); - edited_buffer - .as_ref() - .context("Model tried to edit a file that wasn't included")? - } - Some(ref current) => current, - }; - - edits.extend( - resolve_hunk_edits_in_buffer(hunk, &buffer.text, ranges) - .with_context(|| format!("Diff:\n{diff_str}"))?, - ); - } - DiffEvent::FileEnd { renamed_to } => { - let (buffer, _) = edited_buffer - .take() - .expect("Got a FileEnd event before an Hunk event"); - - if renamed_to.is_some() { - anyhow::bail!("edit predictions cannot rename files"); - } - - if diff.next()?.is_some() { - anyhow::bail!("Edited more than one file"); - } - - return Ok((buffer, edits)); - } - } - } - - Err(anyhow::anyhow!("No EOF")) -} - -#[derive(Debug)] -pub struct OpenedBuffers<'a>(#[allow(unused)] HashMap, Entity>); +#[derive(Clone, Debug)] +pub struct OpenedBuffers(#[allow(unused)] HashMap>); #[must_use] -pub async fn apply_diff<'a>( - diff_str: &'a str, +pub async fn apply_diff( + diff_str: &str, project: &Entity, cx: &mut AsyncApp, -) -> Result> { +) -> Result { let mut included_files = HashMap::default(); + let worktree_id = project.read_with(cx, |project, cx| { + anyhow::Ok( + project + .visible_worktrees(cx) + .next() + .context("no worktrees")? + .read(cx) + .id(), + ) + })??; + for line in diff_str.lines() { let diff_line = DiffLine::parse(line); if let DiffLine::OldPath { path } = diff_line { let buffer = project .update(cx, |project, cx| { - let project_path = - project - .find_project_path(path.as_ref(), cx) - .with_context(|| { - format!("Failed to find worktree for new path: {}", path) - })?; + let project_path = ProjectPath { + worktree_id, + path: RelPath::new(Path::new(path.as_ref()), PathStyle::Posix)?.into_arc(), + }; anyhow::Ok(project.open_buffer(project_path, cx)) })?? 
.await?; - included_files.insert(path, buffer); + included_files.insert(path.to_string(), buffer); } } @@ -113,7 +74,7 @@ pub async fn apply_diff<'a>( let (buffer, ranges) = match current_file { None => { let buffer = included_files - .get_mut(&file_path) + .get_mut(file_path.as_ref()) .expect("Opened all files in diff"); current_file = Some((buffer, ranges.as_slice())); @@ -133,7 +94,7 @@ pub async fn apply_diff<'a>( DiffEvent::FileEnd { renamed_to } => { let (buffer, _) = current_file .take() - .expect("Got a FileEnd event before an Hunk event"); + .context("Got a FileEnd event before an Hunk event")?; if let Some(renamed_to) = renamed_to { project @@ -167,6 +128,29 @@ pub async fn apply_diff<'a>( Ok(OpenedBuffers(included_files)) } +pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { + let mut diff = DiffParser::new(diff_str); + + let mut text = text.to_string(); + + while let Some(event) = diff.next()? { + match event { + DiffEvent::Hunk { hunk, .. } => { + let hunk_offset = text + .find(&hunk.context) + .ok_or_else(|| anyhow!("couldn't resolve hunk {:?}", hunk.context))?; + for edit in hunk.edits.iter().rev() { + let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); + text.replace_range(range, &edit.text); + } + } + DiffEvent::FileEnd { .. } => {} + } + } + + Ok(text) +} + struct PatchFile<'a> { old_path: Cow<'a, str>, new_path: Cow<'a, str>, @@ -391,10 +375,12 @@ impl<'a> DiffLine<'a> { return Some(Self::HunkHeader(None)); } - let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?; - let mut parts = header.split_ascii_whitespace(); - let count_old = parts.next()?; - let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?; + let mut tokens = header.split_whitespace(); + let old_range = tokens.next()?.strip_prefix('-')?; + let new_range = tokens.next()?.strip_prefix('+')?; + + let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); + let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); Some(Self::HunkHeader(Some(HunkLocation { start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), @@ -490,7 +476,6 @@ mod tests { use super::*; use gpui::TestAppContext; use indoc::indoc; - use language::Point; use pretty_assertions::assert_eq; use project::{FakeFs, Project}; use serde_json::json; @@ -752,38 +737,38 @@ mod tests { let project = Project::test(fs, [path!("/root").as_ref()], cx).await; let diff = indoc! {r#" - --- a/root/file1 - +++ b/root/file1 + --- a/file1 + +++ b/file1 one two -three +3 four five - --- a/root/file1 - +++ b/root/file1 + --- a/file1 + +++ b/file1 3 -four -five +4 +5 - --- a/root/file1 - +++ b/root/file1 + --- a/file1 + +++ b/file1 -one -two 3 4 - --- a/root/file2 - +++ b/root/file2 + --- a/file2 + +++ b/file2 +5 six - --- a/root/file2 - +++ b/root/file2 + --- a/file2 + +++ b/file2 seven +7.5 eight - --- a/root/file2 - +++ b/root/file2 + --- a/file2 + +++ b/file2 ten +11 "#}; @@ -815,137 +800,6 @@ mod tests { }); } - #[gpui::test] - async fn test_apply_diff_non_unique(cx: &mut TestAppContext) { - let fs = init_test(cx); - - let buffer_1_text = indoc! 
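The hunk-header change above makes the count optional, as unified diff output allows: `@@ -3 +3 @@` now parses the same as `@@ -3,1 +3,1 @@`. A standalone sketch of that parsing rule:

```rust
// Sketch of hunk-header parsing with optional counts: `@@ -l[,n] +l[,n] @@`,
// where a missing `,n` defaults to 1. Start lines are returned zero-based.
fn parse_hunk_header(line: &str) -> Option<(usize, usize, usize, usize)> {
    let header = line.strip_prefix("@@ ")?.split(" @@").next()?;
    let mut tokens = header.split_whitespace();
    let old_range = tokens.next()?.strip_prefix('-')?;
    let new_range = tokens.next()?.strip_prefix('+')?;

    let (old_start, old_count) = old_range.split_once(',').unwrap_or((old_range, "1"));
    let (new_start, new_count) = new_range.split_once(',').unwrap_or((new_range, "1"));

    Some((
        old_start.parse::<usize>().ok()?.saturating_sub(1),
        old_count.parse().ok()?,
        new_start.parse::<usize>().ok()?.saturating_sub(1),
        new_count.parse().ok()?,
    ))
}

fn main() {
    assert_eq!(parse_hunk_header("@@ -1,5 +1,6 @@"), Some((0, 5, 0, 6)));
    assert_eq!(parse_hunk_header("@@ -3 +3 @@"), Some((2, 1, 2, 1)));
}
```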
{r#" - one - two - three - four - five - one - two - three - four - five - "# }; - - fs.insert_tree( - path!("/root"), - json!({ - "file1": buffer_1_text, - }), - ) - .await; - - let project = Project::test(fs, [path!("/root").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/root/file1"), cx) - }) - .await - .unwrap(); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let diff = indoc! {r#" - --- a/root/file1 - +++ b/root/file1 - one - two - -three - +3 - four - five - "#}; - - let final_text = indoc! {r#" - one - two - three - four - five - one - two - 3 - four - five - "#}; - - apply_diff(diff, &project, &mut cx.to_async()) - .await - .expect_err("Non-unique edits should fail"); - - let ranges = [buffer_snapshot.anchor_before(Point::new(1, 0)) - ..buffer_snapshot.anchor_after(buffer_snapshot.max_point())]; - - let (edited_snapshot, edits) = parse_diff(diff, |_path| Some((&buffer_snapshot, &ranges))) - .await - .unwrap(); - - assert_eq!(edited_snapshot.remote_id(), buffer_snapshot.remote_id()); - buffer.update(cx, |buffer, cx| { - buffer.edit(edits, None, cx); - assert_eq!(buffer.text(), final_text); - }); - } - - #[gpui::test] - async fn test_parse_diff_with_edits_within_line(cx: &mut TestAppContext) { - let fs = init_test(cx); - - let buffer_1_text = indoc! {r#" - one two three four - five six seven eight - nine ten eleven twelve - "# }; - - fs.insert_tree( - path!("/root"), - json!({ - "file1": buffer_1_text, - }), - ) - .await; - - let project = Project::test(fs, [path!("/root").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/root/file1"), cx) - }) - .await - .unwrap(); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let diff = indoc! {r#" - --- a/root/file1 - +++ b/root/file1 - one two three four - -five six seven eight - +five SIX seven eight! - nine ten eleven twelve - "#}; - - let (buffer, edits) = parse_diff(diff, |_path| { - Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_])) - }) - .await - .unwrap(); - - let edits = edits - .into_iter() - .map(|(range, text)| (range.to_point(&buffer), text)) - .collect::>(); - assert_eq!( - edits, - &[ - (Point::new(1, 5)..Point::new(1, 8), "SIX".into()), - (Point::new(1, 20)..Point::new(1, 20), "!".into()) - ] - ); - } - #[gpui::test] async fn test_apply_diff_unique_via_previous_context(cx: &mut TestAppContext) { let fs = init_test(cx); @@ -983,8 +837,8 @@ mod tests { let project = Project::test(fs, [path!("/root").as_ref()], cx).await; let diff = indoc! 
{r#" - --- a/root/file1 - +++ b/root/file1 + --- a/file1 + +++ b/file1 one two -three diff --git a/crates/edit_prediction/src/zed_edit_prediction_delegate.rs b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs new file mode 100644 index 0000000000000000000000000000000000000000..289bcd76daab2b9a4b82db88b86285e6c7aca00d --- /dev/null +++ b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs @@ -0,0 +1,230 @@ +use std::{cmp, sync::Arc}; + +use client::{Client, UserStore}; +use cloud_llm_client::EditPredictionRejectReason; +use edit_prediction_types::{DataCollectionState, EditPredictionDelegate}; +use gpui::{App, Entity, prelude::*}; +use language::{Buffer, ToPoint as _}; +use project::Project; + +use crate::{BufferEditPrediction, EditPredictionModel, EditPredictionStore}; + +pub struct ZedEditPredictionDelegate { + store: Entity, + project: Entity, + singleton_buffer: Option>, +} + +impl ZedEditPredictionDelegate { + pub fn new( + project: Entity, + singleton_buffer: Option>, + client: &Arc, + user_store: &Entity, + cx: &mut Context, + ) -> Self { + let store = EditPredictionStore::global(client, user_store, cx); + store.update(cx, |store, cx| { + store.register_project(&project, cx); + }); + + cx.observe(&store, |_this, _ep_store, cx| { + cx.notify(); + }) + .detach(); + + Self { + project: project, + store: store, + singleton_buffer, + } + } +} + +impl EditPredictionDelegate for ZedEditPredictionDelegate { + fn name() -> &'static str { + "zed-predict" + } + + fn display_name() -> &'static str { + "Zed's Edit Predictions" + } + + fn show_predictions_in_menu() -> bool { + true + } + + fn show_tab_accept_marker() -> bool { + true + } + + fn data_collection_state(&self, cx: &App) -> DataCollectionState { + if let Some(buffer) = &self.singleton_buffer + && let Some(file) = buffer.read(cx).file() + { + let is_project_open_source = + self.store + .read(cx) + .is_file_open_source(&self.project, file, cx); + if self.store.read(cx).data_collection_choice.is_enabled() { + DataCollectionState::Enabled { + is_project_open_source, + } + } else { + DataCollectionState::Disabled { + is_project_open_source, + } + } + } else { + return DataCollectionState::Disabled { + is_project_open_source: false, + }; + } + } + + fn toggle_data_collection(&mut self, cx: &mut App) { + self.store.update(cx, |store, cx| { + store.toggle_data_collection_choice(cx); + }); + } + + fn usage(&self, cx: &App) -> Option { + self.store.read(cx).usage(cx) + } + + fn is_enabled( + &self, + _buffer: &Entity, + _cursor_position: language::Anchor, + cx: &App, + ) -> bool { + let store = self.store.read(cx); + if store.edit_prediction_model == EditPredictionModel::Sweep { + store.has_sweep_api_token(cx) + } else { + true + } + } + + fn is_refreshing(&self, cx: &App) -> bool { + self.store.read(cx).is_refreshing(&self.project) + } + + fn refresh( + &mut self, + buffer: Entity, + cursor_position: language::Anchor, + _debounce: bool, + cx: &mut Context, + ) { + let store = self.store.read(cx); + + if store.user_store.read_with(cx, |user_store, _cx| { + user_store.account_too_young() || user_store.has_overdue_invoices() + }) { + return; + } + + self.store.update(cx, |store, cx| { + if let Some(current) = + store.prediction_at(&buffer, Some(cursor_position), &self.project, cx) + && let BufferEditPrediction::Local { prediction } = current + && prediction.interpolate(buffer.read(cx)).is_some() + { + return; + } + + store.refresh_context(&self.project, &buffer, cursor_position, cx); + 
store.refresh_prediction_from_buffer(self.project.clone(), buffer, cursor_position, cx) + }); + } + + fn accept(&mut self, cx: &mut Context) { + self.store.update(cx, |store, cx| { + store.accept_current_prediction(&self.project, cx); + }); + } + + fn discard(&mut self, cx: &mut Context) { + self.store.update(cx, |store, _cx| { + store.reject_current_prediction(EditPredictionRejectReason::Discarded, &self.project); + }); + } + + fn did_show(&mut self, cx: &mut Context) { + self.store.update(cx, |store, cx| { + store.did_show_current_prediction(&self.project, cx); + }); + } + + fn suggest( + &mut self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut Context, + ) -> Option { + self.store.update(cx, |store, cx| { + let prediction = + store.prediction_at(buffer, Some(cursor_position), &self.project, cx)?; + + let prediction = match prediction { + BufferEditPrediction::Local { prediction } => prediction, + BufferEditPrediction::Jump { prediction } => { + return Some(edit_prediction_types::EditPrediction::Jump { + id: Some(prediction.id.to_string().into()), + snapshot: prediction.snapshot.clone(), + target: prediction.edits.first().unwrap().0.start, + }); + } + }; + + let buffer = buffer.read(cx); + let snapshot = buffer.snapshot(); + + let Some(edits) = prediction.interpolate(&snapshot) else { + store.reject_current_prediction( + EditPredictionRejectReason::InterpolatedEmpty, + &self.project, + ); + return None; + }; + + let cursor_row = cursor_position.to_point(&snapshot).row; + let (closest_edit_ix, (closest_edit_range, _)) = + edits.iter().enumerate().min_by_key(|(_, (range, _))| { + let distance_from_start = + cursor_row.abs_diff(range.start.to_point(&snapshot).row); + let distance_from_end = cursor_row.abs_diff(range.end.to_point(&snapshot).row); + cmp::min(distance_from_start, distance_from_end) + })?; + + let mut edit_start_ix = closest_edit_ix; + for (range, _) in edits[..edit_start_ix].iter().rev() { + let distance_from_closest_edit = closest_edit_range.start.to_point(&snapshot).row + - range.end.to_point(&snapshot).row; + if distance_from_closest_edit <= 1 { + edit_start_ix -= 1; + } else { + break; + } + } + + let mut edit_end_ix = closest_edit_ix + 1; + for (range, _) in &edits[edit_end_ix..] 
{ + let distance_from_closest_edit = range.start.to_point(buffer).row + - closest_edit_range.end.to_point(&snapshot).row; + if distance_from_closest_edit <= 1 { + edit_end_ix += 1; + } else { + break; + } + } + + Some(edit_prediction_types::EditPrediction::Local { + id: Some(prediction.id.to_string().into()), + edits: edits[edit_start_ix..edit_end_ix].to_vec(), + edit_preview: Some(prediction.edit_preview.clone()), + }) + }) + } +} diff --git a/crates/edit_prediction/src/zeta1.rs b/crates/edit_prediction/src/zeta1.rs new file mode 100644 index 0000000000000000000000000000000000000000..01c26573307e66cd6ca3bf8ab748ba8d082ea688 --- /dev/null +++ b/crates/edit_prediction/src/zeta1.rs @@ -0,0 +1,671 @@ +use std::{fmt::Write, ops::Range, path::Path, sync::Arc, time::Instant}; + +use crate::{ + DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, EditPredictionModelInput, + EditPredictionStartedDebugEvent, EditPredictionStore, ZedUpdateRequiredError, + cursor_excerpt::{editable_and_context_ranges_for_cursor_position, guess_token_count}, + prediction::EditPredictionResult, +}; +use anyhow::{Context as _, Result}; +use cloud_llm_client::{ + PredictEditsBody, PredictEditsGitInfo, PredictEditsRequestTrigger, PredictEditsResponse, +}; +use gpui::{App, AppContext as _, AsyncApp, Context, Entity, SharedString, Task}; +use language::{ + Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, Point, ToOffset, ToPoint as _, text_diff, +}; +use project::{Project, ProjectPath}; +use release_channel::AppVersion; +use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; +use zeta_prompt::{Event, ZetaPromptInput}; + +const CURSOR_MARKER: &str = "<|user_cursor_is_here|>"; +const START_OF_FILE_MARKER: &str = "<|start_of_file|>"; +const EDITABLE_REGION_START_MARKER: &str = "<|editable_region_start|>"; +const EDITABLE_REGION_END_MARKER: &str = "<|editable_region_end|>"; + +pub(crate) const MAX_CONTEXT_TOKENS: usize = 150; +pub(crate) const MAX_REWRITE_TOKENS: usize = 350; +pub(crate) const MAX_EVENT_TOKENS: usize = 500; + +pub(crate) fn request_prediction_with_zeta1( + store: &mut EditPredictionStore, + EditPredictionModelInput { + project, + buffer, + snapshot, + position, + events, + trigger, + debug_tx, + .. 
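The `suggest` implementation above surfaces a single coherent group of edits rather than everything at once: it picks the edit whose rows are closest to the cursor, then extends the group to neighbours within one row on either side. A row-based sketch of that selection, using plain row ranges instead of buffer anchors:

```rust
// Simplified sketch of edit grouping: `edit_rows` holds the sorted,
// non-overlapping row ranges of each pending edit; the result is the index
// range of the group to show for this cursor row.
use std::ops::Range;

fn group_around_cursor(edit_rows: &[Range<u32>], cursor_row: u32) -> Option<Range<usize>> {
    let (closest_ix, closest) = edit_rows.iter().enumerate().min_by_key(|(_, range)| {
        cursor_row
            .abs_diff(range.start)
            .min(cursor_row.abs_diff(range.end))
    })?;

    let mut start = closest_ix;
    while start > 0 && closest.start - edit_rows[start - 1].end <= 1 {
        start -= 1;
    }
    let mut end = closest_ix + 1;
    while end < edit_rows.len() && edit_rows[end].start - closest.end <= 1 {
        end += 1;
    }
    Some(start..end)
}

fn main() {
    // Edits on rows 2, 3 and 10 with the cursor on row 4: the first two edits
    // form one group; the edit on row 10 is left for a later suggestion.
    let rows = vec![2..2, 3..3, 10..10];
    assert_eq!(group_around_cursor(&rows, 4), Some(0..2));
}
```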
+ }: EditPredictionModelInput, + cx: &mut Context, +) -> Task>> { + let buffer_snapshotted_at = Instant::now(); + let client = store.client.clone(); + let llm_token = store.llm_token.clone(); + let app_version = AppVersion::global(cx); + + let (git_info, can_collect_file) = if let Some(file) = snapshot.file() { + let can_collect_file = store.can_collect_file(&project, file, cx); + let git_info = if can_collect_file { + git_info_for_file(&project, &ProjectPath::from_file(file.as_ref(), cx), cx) + } else { + None + }; + (git_info, can_collect_file) + } else { + (None, false) + }; + + let full_path: Arc = snapshot + .file() + .map(|f| Arc::from(f.full_path(cx).as_path())) + .unwrap_or_else(|| Arc::from(Path::new("untitled"))); + let full_path_str = full_path.to_string_lossy().into_owned(); + let cursor_point = position.to_point(&snapshot); + let prompt_for_events = { + let events = events.clone(); + move || prompt_for_events_impl(&events, MAX_EVENT_TOKENS) + }; + let gather_task = gather_context( + full_path_str, + &snapshot, + cursor_point, + prompt_for_events, + trigger, + cx, + ); + + let (uri, require_auth) = match &store.custom_predict_edits_url { + Some(custom_url) => (custom_url.clone(), false), + None => { + match client + .http_client() + .build_zed_llm_url("/predict_edits/v2", &[]) + { + Ok(url) => (url.into(), true), + Err(err) => return Task::ready(Err(err)), + } + } + }; + + cx.spawn(async move |this, cx| { + let GatherContextOutput { + mut body, + context_range, + editable_range, + included_events_count, + } = gather_task.await?; + let done_gathering_context_at = Instant::now(); + + let included_events = &events[events.len() - included_events_count..events.len()]; + body.can_collect_data = can_collect_file + && this + .read_with(cx, |this, _| this.can_collect_events(included_events)) + .unwrap_or(false); + if body.can_collect_data { + body.git_info = git_info; + } + + log::debug!( + "Events:\n{}\nExcerpt:\n{:?}", + body.input_events, + body.input_excerpt + ); + + let response = EditPredictionStore::send_api_request::( + |request| { + Ok(request + .uri(uri.as_str()) + .body(serde_json::to_string(&body)?.into())?) 
+ }, + client, + llm_token, + app_version, + require_auth, + ) + .await; + + let context_start_offset = context_range.start.to_offset(&snapshot); + let editable_offset_range = editable_range.to_offset(&snapshot); + + let inputs = ZetaPromptInput { + events: included_events.into(), + related_files: vec![].into(), + cursor_path: full_path, + cursor_excerpt: snapshot + .text_for_range(context_range) + .collect::() + .into(), + editable_range_in_excerpt: (editable_range.start - context_start_offset) + ..(editable_offset_range.end - context_start_offset), + cursor_offset_in_excerpt: cursor_point.to_offset(&snapshot) - context_start_offset, + }; + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionStarted( + EditPredictionStartedDebugEvent { + buffer: buffer.downgrade(), + prompt: Some(serde_json::to_string(&inputs).unwrap()), + position, + }, + )) + .ok(); + } + + let (response, usage) = match response { + Ok(response) => response, + Err(err) => { + if err.is::() { + cx.update(|cx| { + this.update(cx, |ep_store, _cx| { + ep_store.update_required = true; + }) + .ok(); + + let error_message: SharedString = err.to_string().into(); + show_app_notification( + NotificationId::unique::(), + cx, + move |cx| { + cx.new(|cx| { + ErrorMessagePrompt::new(error_message.clone(), cx) + .with_link_button("Update Zed", "https://zed.dev/releases") + }) + }, + ); + }) + .ok(); + } + + return Err(err); + } + }; + + let received_response_at = Instant::now(); + log::debug!("completion response: {}", &response.output_excerpt); + + if let Some(usage) = usage { + this.update(cx, |this, cx| { + this.user_store.update(cx, |user_store, cx| { + user_store.update_edit_prediction_usage(usage, cx); + }); + }) + .ok(); + } + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionFinished( + EditPredictionFinishedDebugEvent { + buffer: buffer.downgrade(), + model_output: Some(response.output_excerpt.clone()), + position, + }, + )) + .ok(); + } + + let edit_prediction = process_completion_response( + response, + buffer, + &snapshot, + editable_range, + inputs, + buffer_snapshotted_at, + received_response_at, + cx, + ) + .await; + + let finished_at = Instant::now(); + + // record latency for ~1% of requests + if rand::random::() <= 2 { + telemetry::event!( + "Edit Prediction Request", + context_latency = done_gathering_context_at + .duration_since(buffer_snapshotted_at) + .as_millis(), + request_latency = received_response_at + .duration_since(done_gathering_context_at) + .as_millis(), + process_latency = finished_at.duration_since(received_response_at).as_millis() + ); + } + + edit_prediction.map(Some) + }) +} + +fn process_completion_response( + prediction_response: PredictEditsResponse, + buffer: Entity, + snapshot: &BufferSnapshot, + editable_range: Range, + inputs: ZetaPromptInput, + buffer_snapshotted_at: Instant, + received_response_at: Instant, + cx: &AsyncApp, +) -> Task> { + let snapshot = snapshot.clone(); + let request_id = prediction_response.request_id; + let output_excerpt = prediction_response.output_excerpt; + cx.spawn(async move |cx| { + let output_excerpt: Arc = output_excerpt.into(); + + let edits: Arc<[(Range, Arc)]> = cx + .background_spawn({ + let output_excerpt = output_excerpt.clone(); + let editable_range = editable_range.clone(); + let snapshot = snapshot.clone(); + async move { parse_edits(output_excerpt, editable_range, &snapshot) } + }) + .await? 
+ .into(); + + let id = EditPredictionId(request_id.into()); + Ok(EditPredictionResult::new( + id, + &buffer, + &snapshot, + edits, + buffer_snapshotted_at, + received_response_at, + inputs, + cx, + ) + .await) + }) +} + +fn parse_edits( + output_excerpt: Arc, + editable_range: Range, + snapshot: &BufferSnapshot, +) -> Result, Arc)>> { + let content = output_excerpt.replace(CURSOR_MARKER, ""); + + let start_markers = content + .match_indices(EDITABLE_REGION_START_MARKER) + .collect::>(); + anyhow::ensure!( + start_markers.len() == 1, + "expected exactly one start marker, found {}", + start_markers.len() + ); + + let end_markers = content + .match_indices(EDITABLE_REGION_END_MARKER) + .collect::>(); + anyhow::ensure!( + end_markers.len() == 1, + "expected exactly one end marker, found {}", + end_markers.len() + ); + + let sof_markers = content + .match_indices(START_OF_FILE_MARKER) + .collect::>(); + anyhow::ensure!( + sof_markers.len() <= 1, + "expected at most one start-of-file marker, found {}", + sof_markers.len() + ); + + let codefence_start = start_markers[0].0; + let content = &content[codefence_start..]; + + let newline_ix = content.find('\n').context("could not find newline")?; + let content = &content[newline_ix + 1..]; + + let codefence_end = content + .rfind(&format!("\n{EDITABLE_REGION_END_MARKER}")) + .context("could not find end marker")?; + let new_text = &content[..codefence_end]; + + let old_text = snapshot + .text_for_range(editable_range.clone()) + .collect::(); + + Ok(compute_edits( + old_text, + new_text, + editable_range.start, + snapshot, + )) +} + +pub fn compute_edits( + old_text: String, + new_text: &str, + offset: usize, + snapshot: &BufferSnapshot, +) -> Vec<(Range, Arc)> { + text_diff(&old_text, new_text) + .into_iter() + .map(|(mut old_range, new_text)| { + old_range.start += offset; + old_range.end += offset; + + let prefix_len = common_prefix( + snapshot.chars_for_range(old_range.clone()), + new_text.chars(), + ); + old_range.start += prefix_len; + + let suffix_len = common_prefix( + snapshot.reversed_chars_for_range(old_range.clone()), + new_text[prefix_len..].chars().rev(), + ); + old_range.end = old_range.end.saturating_sub(suffix_len); + + let new_text = new_text[prefix_len..new_text.len() - suffix_len].into(); + let range = if old_range.is_empty() { + let anchor = snapshot.anchor_after(old_range.start); + anchor..anchor + } else { + snapshot.anchor_after(old_range.start)..snapshot.anchor_before(old_range.end) + }; + (range, new_text) + }) + .collect() +} + +fn common_prefix, T2: Iterator>(a: T1, b: T2) -> usize { + a.zip(b) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a.len_utf8()) + .sum() +} + +fn git_info_for_file( + project: &Entity, + project_path: &ProjectPath, + cx: &App, +) -> Option { + let git_store = project.read(cx).git_store().read(cx); + if let Some((repository, _repo_path)) = + git_store.repository_and_path_for_project_path(project_path, cx) + { + let repository = repository.read(cx); + let head_sha = repository + .head_commit + .as_ref() + .map(|head_commit| head_commit.sha.to_string()); + let remote_origin_url = repository.remote_origin_url.clone(); + let remote_upstream_url = repository.remote_upstream_url.clone(); + if head_sha.is_none() && remote_origin_url.is_none() && remote_upstream_url.is_none() { + return None; + } + Some(PredictEditsGitInfo { + head_sha, + remote_origin_url, + remote_upstream_url, + }) + } else { + None + } +} + +pub struct GatherContextOutput { + pub body: PredictEditsBody, + pub context_range: Range, + pub 
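`compute_edits` above tightens each replacement produced by the text diff: the common prefix and suffix between the old and new text are dropped so that only the characters that actually change end up in the edit, keeping the resulting ranges as small as possible. A string-level sketch of that step:

```rust
// Sketch of the prefix/suffix trimming: given the old and new text of one diff
// hunk, return the byte range that really changed and its replacement.
use std::ops::Range;

fn common_len(a: impl Iterator<Item = char>, b: impl Iterator<Item = char>) -> usize {
    a.zip(b)
        .take_while(|(a, b)| a == b)
        .map(|(a, _)| a.len_utf8())
        .sum()
}

fn tighten(old: &str, new: &str) -> (Range<usize>, String) {
    let prefix = common_len(old.chars(), new.chars());
    let suffix = common_len(old[prefix..].chars().rev(), new[prefix..].chars().rev());
    (
        prefix..old.len() - suffix,
        new[prefix..new.len() - suffix].to_string(),
    )
}

fn main() {
    let (range, text) = tighten("let x = 42;", "let x = 43;");
    assert_eq!((range, text.as_str()), (9..10, "3"));
}
```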
editable_range: Range, + pub included_events_count: usize, +} + +pub fn gather_context( + full_path_str: String, + snapshot: &BufferSnapshot, + cursor_point: language::Point, + prompt_for_events: impl FnOnce() -> (String, usize) + Send + 'static, + trigger: PredictEditsRequestTrigger, + cx: &App, +) -> Task> { + cx.background_spawn({ + let snapshot = snapshot.clone(); + async move { + let input_excerpt = excerpt_for_cursor_position( + cursor_point, + &full_path_str, + &snapshot, + MAX_REWRITE_TOKENS, + MAX_CONTEXT_TOKENS, + ); + let (input_events, included_events_count) = prompt_for_events(); + let editable_range = input_excerpt.editable_range.to_offset(&snapshot); + + let body = PredictEditsBody { + input_events, + input_excerpt: input_excerpt.prompt, + can_collect_data: false, + diagnostic_groups: None, + git_info: None, + outline: None, + speculated_output: None, + trigger, + }; + + Ok(GatherContextOutput { + body, + context_range: input_excerpt.context_range, + editable_range, + included_events_count, + }) + } + }) +} + +fn prompt_for_events_impl(events: &[Arc], mut remaining_tokens: usize) -> (String, usize) { + let mut result = String::new(); + for (ix, event) in events.iter().rev().enumerate() { + let event_string = format_event(event.as_ref()); + let event_tokens = guess_token_count(event_string.len()); + if event_tokens > remaining_tokens { + return (result, ix); + } + + if !result.is_empty() { + result.insert_str(0, "\n\n"); + } + result.insert_str(0, &event_string); + remaining_tokens -= event_tokens; + } + return (result, events.len()); +} + +pub fn format_event(event: &Event) -> String { + match event { + Event::BufferChange { + path, + old_path, + diff, + .. + } => { + let mut prompt = String::new(); + + if old_path != path { + writeln!( + prompt, + "User renamed {} to {}\n", + old_path.display(), + path.display() + ) + .unwrap(); + } + + if !diff.is_empty() { + write!( + prompt, + "User edited {}:\n```diff\n{}\n```", + path.display(), + diff + ) + .unwrap(); + } + + prompt + } + } +} + +#[derive(Debug)] +pub struct InputExcerpt { + pub context_range: Range, + pub editable_range: Range, + pub prompt: String, +} + +pub fn excerpt_for_cursor_position( + position: Point, + path: &str, + snapshot: &BufferSnapshot, + editable_region_token_limit: usize, + context_token_limit: usize, +) -> InputExcerpt { + let (editable_range, context_range) = editable_and_context_ranges_for_cursor_position( + position, + snapshot, + editable_region_token_limit, + context_token_limit, + ); + + let mut prompt = String::new(); + + writeln!(&mut prompt, "```{path}").unwrap(); + if context_range.start == Point::zero() { + writeln!(&mut prompt, "{START_OF_FILE_MARKER}").unwrap(); + } + + for chunk in snapshot.chunks(context_range.start..editable_range.start, false) { + prompt.push_str(chunk.text); + } + + push_editable_range(position, snapshot, editable_range.clone(), &mut prompt); + + for chunk in snapshot.chunks(editable_range.end..context_range.end, false) { + prompt.push_str(chunk.text); + } + write!(prompt, "\n```").unwrap(); + + InputExcerpt { + context_range, + editable_range, + prompt, + } +} + +fn push_editable_range( + cursor_position: Point, + snapshot: &BufferSnapshot, + editable_range: Range, + prompt: &mut String, +) { + writeln!(prompt, "{EDITABLE_REGION_START_MARKER}").unwrap(); + for chunk in snapshot.chunks(editable_range.start..cursor_position, false) { + prompt.push_str(chunk.text); + } + prompt.push_str(CURSOR_MARKER); + for chunk in 
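`prompt_for_events_impl` above walks the edit history from newest to oldest and keeps prepending events until a rough token budget runs out, returning both the rendered history and how many events made the cut, so callers know exactly which suffix of the history was included. A simplified sketch; the bytes-divided-by-four token estimate is an assumption standing in for `guess_token_count`, which is defined elsewhere in the crate:

```rust
// Simplified sketch of the newest-first event budgeting. The `len() / 4`
// token estimate is an assumption used only for this sketch.
fn budget_events(events: &[String], mut remaining_tokens: usize) -> (String, usize) {
    let mut result = String::new();
    for (ix, event) in events.iter().rev().enumerate() {
        let event_tokens = event.len() / 4;
        if event_tokens > remaining_tokens {
            // `ix` events (counted from the newest) fit within the budget.
            return (result, ix);
        }
        if !result.is_empty() {
            result.insert_str(0, "\n\n");
        }
        result.insert_str(0, event);
        remaining_tokens -= event_tokens;
    }
    (result, events.len())
}

fn main() {
    let events = vec!["old edit ".repeat(20), "recent edit".to_string()];
    let (history, included) = budget_events(&events, 10);
    assert_eq!(included, 1);
    assert_eq!(history, "recent edit");
}
```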
snapshot.chunks(cursor_position..editable_range.end, false) { + prompt.push_str(chunk.text); + } + write!(prompt, "\n{EDITABLE_REGION_END_MARKER}").unwrap(); +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{App, AppContext}; + use indoc::indoc; + use language::Buffer; + + #[gpui::test] + fn test_excerpt_for_cursor_position(cx: &mut App) { + let text = indoc! {r#" + fn foo() { + let x = 42; + println!("Hello, world!"); + } + + fn bar() { + let x = 42; + let mut sum = 0; + for i in 0..x { + sum += i; + } + println!("Sum: {}", sum); + return sum; + } + + fn generate_random_numbers() -> Vec { + let mut rng = rand::thread_rng(); + let mut numbers = Vec::new(); + for _ in 0..5 { + numbers.push(rng.random_range(1..101)); + } + numbers + } + "#}; + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language::rust_lang(), cx)); + let snapshot = buffer.read(cx).snapshot(); + + // Ensure we try to fit the largest possible syntax scope, resorting to line-based expansion + // when a larger scope doesn't fit the editable region. + let excerpt = excerpt_for_cursor_position(Point::new(12, 5), "main.rs", &snapshot, 50, 32); + assert_eq!( + excerpt.prompt, + indoc! {r#" + ```main.rs + let x = 42; + println!("Hello, world!"); + <|editable_region_start|> + } + + fn bar() { + let x = 42; + let mut sum = 0; + for i in 0..x { + sum += i; + } + println!("Sum: {}", sum); + r<|user_cursor_is_here|>eturn sum; + } + + fn generate_random_numbers() -> Vec { + <|editable_region_end|> + let mut rng = rand::thread_rng(); + let mut numbers = Vec::new(); + ```"#} + ); + + // The `bar` function won't fit within the editable region, so we resort to line-based expansion. + let excerpt = excerpt_for_cursor_position(Point::new(12, 5), "main.rs", &snapshot, 40, 32); + assert_eq!( + excerpt.prompt, + indoc! {r#" + ```main.rs + fn bar() { + let x = 42; + let mut sum = 0; + <|editable_region_start|> + for i in 0..x { + sum += i; + } + println!("Sum: {}", sum); + r<|user_cursor_is_here|>eturn sum; + } + + fn generate_random_numbers() -> Vec { + let mut rng = rand::thread_rng(); + <|editable_region_end|> + let mut numbers = Vec::new(); + for _ in 0..5 { + numbers.push(rng.random_range(1..101)); + ```"#} + ); + } +} diff --git a/crates/edit_prediction/src/zeta2.rs b/crates/edit_prediction/src/zeta2.rs new file mode 100644 index 0000000000000000000000000000000000000000..9706e2b9ecd03f6e8ba592210722725f420643d3 --- /dev/null +++ b/crates/edit_prediction/src/zeta2.rs @@ -0,0 +1,243 @@ +#[cfg(feature = "cli-support")] +use crate::EvalCacheEntryKind; +use crate::open_ai_response::text_from_response; +use crate::prediction::EditPredictionResult; +use crate::{ + DebugEvent, EDIT_PREDICTIONS_MODEL_ID, EditPredictionFinishedDebugEvent, EditPredictionId, + EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, +}; +use anyhow::{Result, anyhow}; +use cloud_llm_client::EditPredictionRejectReason; +use gpui::{Task, prelude::*}; +use language::{OffsetRangeExt as _, ToOffset as _, ToPoint}; +use release_channel::AppVersion; +use std::{path::Path, sync::Arc, time::Instant}; +use zeta_prompt::CURSOR_MARKER; +use zeta_prompt::format_zeta_prompt; + +const MAX_CONTEXT_TOKENS: usize = 150; +const MAX_REWRITE_TOKENS: usize = 350; + +pub fn request_prediction_with_zeta2( + store: &mut EditPredictionStore, + EditPredictionModelInput { + buffer, + snapshot, + position, + related_files, + events, + debug_tx, + .. 
+ }: EditPredictionModelInput, + cx: &mut Context, +) -> Task>> { + let buffer_snapshotted_at = Instant::now(); + + let Some(excerpt_path) = snapshot + .file() + .map(|file| -> Arc { file.full_path(cx).into() }) + else { + return Task::ready(Err(anyhow!("No file path for excerpt"))); + }; + + let client = store.client.clone(); + let llm_token = store.llm_token.clone(); + let app_version = AppVersion::global(cx); + + #[cfg(feature = "cli-support")] + let eval_cache = store.eval_cache.clone(); + + let request_task = cx.background_spawn({ + async move { + let cursor_offset = position.to_offset(&snapshot); + let (editable_offset_range, prompt_input) = zeta2_prompt_input( + &snapshot, + related_files, + events, + excerpt_path, + cursor_offset, + ); + + let prompt = format_zeta_prompt(&prompt_input); + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionStarted( + EditPredictionStartedDebugEvent { + buffer: buffer.downgrade(), + prompt: Some(prompt.clone()), + position, + }, + )) + .ok(); + } + + let request = open_ai::Request { + model: EDIT_PREDICTIONS_MODEL_ID.clone(), + messages: vec![open_ai::RequestMessage::User { + content: open_ai::MessageContent::Plain(prompt), + }], + stream: false, + max_completion_tokens: None, + stop: Default::default(), + temperature: Default::default(), + tool_choice: None, + parallel_tool_calls: None, + tools: vec![], + prompt_cache_key: None, + reasoning_effort: None, + }; + + log::trace!("Sending edit prediction request"); + + let response = EditPredictionStore::send_raw_llm_request( + request, + client, + llm_token, + app_version, + #[cfg(feature = "cli-support")] + eval_cache, + #[cfg(feature = "cli-support")] + EvalCacheEntryKind::Prediction, + ) + .await; + let received_response_at = Instant::now(); + + log::trace!("Got edit prediction response"); + + let (res, usage) = response?; + let request_id = EditPredictionId(res.id.clone().into()); + let Some(mut output_text) = text_from_response(res) else { + return Ok((Some((request_id, None)), usage)); + }; + + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(DebugEvent::EditPredictionFinished( + EditPredictionFinishedDebugEvent { + buffer: buffer.downgrade(), + position, + model_output: Some(output_text.clone()), + }, + )) + .ok(); + } + + if output_text.contains(CURSOR_MARKER) { + log::trace!("Stripping out {CURSOR_MARKER} from response"); + output_text = output_text.replace(CURSOR_MARKER, ""); + } + + let old_text = snapshot + .text_for_range(editable_offset_range.clone()) + .collect::(); + let edits: Vec<_> = language::text_diff(&old_text, &output_text) + .into_iter() + .map(|(range, text)| { + ( + snapshot.anchor_after(editable_offset_range.start + range.start) + ..snapshot.anchor_before(editable_offset_range.start + range.end), + text, + ) + }) + .collect(); + + anyhow::Ok(( + Some(( + request_id, + Some(( + prompt_input, + buffer, + snapshot.clone(), + edits, + received_response_at, + )), + )), + usage, + )) + } + }); + + cx.spawn(async move |this, cx| { + let Some((id, prediction)) = + EditPredictionStore::handle_api_response(&this, request_task.await, cx)? 
+ else { + return Ok(None); + }; + + let Some((inputs, edited_buffer, edited_buffer_snapshot, edits, received_response_at)) = + prediction + else { + return Ok(Some(EditPredictionResult { + id, + prediction: Err(EditPredictionRejectReason::Empty), + })); + }; + + Ok(Some( + EditPredictionResult::new( + id, + &edited_buffer, + &edited_buffer_snapshot, + edits.into(), + buffer_snapshotted_at, + received_response_at, + inputs, + cx, + ) + .await, + )) + }) +} + +pub fn zeta2_prompt_input( + snapshot: &language::BufferSnapshot, + related_files: Arc<[zeta_prompt::RelatedFile]>, + events: Vec>, + excerpt_path: Arc, + cursor_offset: usize, +) -> (std::ops::Range, zeta_prompt::ZetaPromptInput) { + let cursor_point = cursor_offset.to_point(snapshot); + + let (editable_range, context_range) = + crate::cursor_excerpt::editable_and_context_ranges_for_cursor_position( + cursor_point, + snapshot, + MAX_CONTEXT_TOKENS, + MAX_REWRITE_TOKENS, + ); + + let context_start_offset = context_range.start.to_offset(snapshot); + let editable_offset_range = editable_range.to_offset(snapshot); + let cursor_offset_in_excerpt = cursor_offset - context_start_offset; + let editable_range_in_excerpt = (editable_offset_range.start - context_start_offset) + ..(editable_offset_range.end - context_start_offset); + + let prompt_input = zeta_prompt::ZetaPromptInput { + cursor_path: excerpt_path, + cursor_excerpt: snapshot + .text_for_range(context_range) + .collect::() + .into(), + editable_range_in_excerpt, + cursor_offset_in_excerpt, + events, + related_files, + }; + (editable_offset_range, prompt_input) +} + +#[cfg(feature = "cli-support")] +pub fn zeta2_output_for_patch(input: &zeta_prompt::ZetaPromptInput, patch: &str) -> Result { + let text = &input.cursor_excerpt; + let editable_region = input.editable_range_in_excerpt.clone(); + let old_prefix = &text[..editable_region.start]; + let old_suffix = &text[editable_region.end..]; + + let new = crate::udiff::apply_diff_to_string(patch, text)?; + if !new.starts_with(old_prefix) || !new.ends_with(old_suffix) { + anyhow::bail!("Patch shouldn't affect text outside of editable region"); + } + + Ok(new[editable_region.start..new.len() - old_suffix.len()].to_string()) +} diff --git a/crates/zeta_cli/Cargo.toml b/crates/edit_prediction_cli/Cargo.toml similarity index 68% rename from crates/zeta_cli/Cargo.toml rename to crates/edit_prediction_cli/Cargo.toml index 2e62f2a4462e31b7632aa5e825ea76a4b7df5fc8..b6bace2a2c080626126af96f9ef51e435d6ab8fa 100644 --- a/crates/zeta_cli/Cargo.toml +++ b/crates/edit_prediction_cli/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "zeta_cli" +name = "edit_prediction_cli" version = "0.1.0" edition.workspace = true publish.workspace = true @@ -9,38 +9,37 @@ license = "GPL-3.0-or-later" workspace = true [[bin]] -name = "zeta" +name = "ep" path = "src/main.rs" [dependencies] - anyhow.workspace = true +anthropic.workspace = true +http_client.workspace = true chrono.workspace = true clap.workspace = true client.workspace = true cloud_llm_client.workspace= true -cloud_zeta2_prompt.workspace= true collections.workspace = true debug_adapter_extension.workspace = true -edit_prediction_context.workspace = true +dirs.workspace = true extension.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true gpui_tokio.workspace = true +indoc.workspace = true language.workspace = true language_extension.workspace = true language_model.workspace = true language_models.workspace = true languages = { workspace = true, features = ["load-grammars"] } 
+libc.workspace = true log.workspace = true node_runtime.workspace = true -ordered-float.workspace = true paths.workspace = true -polars = { version = "0.51", features = ["lazy", "dtype-struct", "parquet"] } project.workspace = true prompt_store.workspace = true -pulldown-cmark.workspace = true release_channel.workspace = true reqwest_client.workspace = true serde.workspace = true @@ -48,14 +47,22 @@ serde_json.workspace = true settings.workspace = true shellexpand.workspace = true smol.workspace = true -soa-rs = "0.8.1" +sqlez.workspace = true +sqlez_macros.workspace = true terminal_view.workspace = true -toml.workspace = true util.workspace = true watch.workspace = true -zeta.workspace = true -zeta2 = { workspace = true, features = ["llm-response-cache"] } -zlog.workspace = true +edit_prediction = { workspace = true, features = ["cli-support"] } +wasmtime.workspace = true +zeta_prompt.workspace = true + +# Wasmtime is included as a dependency in order to enable the same +# features that are enabled in Zed. +# +# If we don't enable these features we get crashes when creating +# a Tree-sitter WasmStore. +[package.metadata.cargo-machete] +ignored = ["wasmtime"] [dev-dependencies] indoc.workspace = true diff --git a/crates/edit_prediction_button/LICENSE-GPL b/crates/edit_prediction_cli/LICENSE-GPL similarity index 100% rename from crates/edit_prediction_button/LICENSE-GPL rename to crates/edit_prediction_cli/LICENSE-GPL diff --git a/crates/zeta_cli/build.rs b/crates/edit_prediction_cli/build.rs similarity index 100% rename from crates/zeta_cli/build.rs rename to crates/edit_prediction_cli/build.rs diff --git a/crates/edit_prediction_cli/src/anthropic_client.rs b/crates/edit_prediction_cli/src/anthropic_client.rs new file mode 100644 index 0000000000000000000000000000000000000000..8afc4d1c03f8a37ae258cc2926daf85caebe3d8a --- /dev/null +++ b/crates/edit_prediction_cli/src/anthropic_client.rs @@ -0,0 +1,418 @@ +use anthropic::{ + ANTHROPIC_API_URL, Message, Request as AnthropicRequest, RequestContent, + Response as AnthropicResponse, Role, non_streaming_completion, +}; +use anyhow::Result; +use http_client::HttpClient; +use indoc::indoc; +use reqwest_client::ReqwestClient; +use sqlez::bindable::Bind; +use sqlez::bindable::StaticColumnCount; +use sqlez_macros::sql; +use std::hash::Hash; +use std::hash::Hasher; +use std::path::Path; +use std::sync::Arc; + +pub struct PlainLlmClient { + http_client: Arc, + api_key: String, +} + +impl PlainLlmClient { + fn new() -> Result { + let http_client: Arc = Arc::new(ReqwestClient::new()); + let api_key = std::env::var("ANTHROPIC_API_KEY") + .map_err(|_| anyhow::anyhow!("ANTHROPIC_API_KEY environment variable not set"))?; + Ok(Self { + http_client, + api_key, + }) + } + + async fn generate( + &self, + model: &str, + max_tokens: u64, + messages: Vec, + ) -> Result { + let request = AnthropicRequest { + model: model.to_string(), + max_tokens, + messages, + tools: Vec::new(), + thinking: None, + tool_choice: None, + system: None, + metadata: None, + stop_sequences: Vec::new(), + temperature: None, + top_k: None, + top_p: None, + }; + + let response = non_streaming_completion( + self.http_client.as_ref(), + ANTHROPIC_API_URL, + &self.api_key, + request, + None, + ) + .await + .map_err(|e| anyhow::anyhow!("{:?}", e))?; + + Ok(response) + } +} + +pub struct BatchingLlmClient { + connection: sqlez::connection::Connection, + http_client: Arc, + api_key: String, +} + +struct CacheRow { + request_hash: String, + request: Option, + response: Option, + batch_id: Option, 
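+ // request/response hold the serialized request and response JSON; batch_id is filled in once + // the row has been uploaded in an Anthropic batch and stays NULL while the request is only queued.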
+} + +impl StaticColumnCount for CacheRow { + fn column_count() -> usize { + 4 + } +} + +impl Bind for CacheRow { + fn bind(&self, statement: &sqlez::statement::Statement, start_index: i32) -> Result { + let next_index = statement.bind(&self.request_hash, start_index)?; + let next_index = statement.bind(&self.request, next_index)?; + let next_index = statement.bind(&self.response, next_index)?; + let next_index = statement.bind(&self.batch_id, next_index)?; + Ok(next_index) + } +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct SerializableRequest { + model: String, + max_tokens: u64, + messages: Vec, +} + +#[derive(serde::Serialize, serde::Deserialize)] +struct SerializableMessage { + role: String, + content: String, +} + +impl BatchingLlmClient { + fn new(cache_path: &Path) -> Result { + let http_client: Arc = Arc::new(ReqwestClient::new()); + let api_key = std::env::var("ANTHROPIC_API_KEY") + .map_err(|_| anyhow::anyhow!("ANTHROPIC_API_KEY environment variable not set"))?; + + let connection = sqlez::connection::Connection::open_file(&cache_path.to_str().unwrap()); + let mut statement = sqlez::statement::Statement::prepare( + &connection, + indoc! {" + CREATE TABLE IF NOT EXISTS cache ( + request_hash TEXT PRIMARY KEY, + request TEXT, + response TEXT, + batch_id TEXT + ); + "}, + )?; + statement.exec()?; + drop(statement); + + Ok(Self { + connection, + http_client, + api_key, + }) + } + + pub fn lookup( + &self, + model: &str, + max_tokens: u64, + messages: &[Message], + ) -> Result> { + let request_hash_str = Self::request_hash(model, max_tokens, messages); + let response: Vec = self.connection.select_bound( + &sql!(SELECT response FROM cache WHERE request_hash = ?1 AND response IS NOT NULL;), + )?(request_hash_str.as_str())?; + Ok(response + .into_iter() + .next() + .and_then(|text| serde_json::from_str(&text).ok())) + } + + pub fn mark_for_batch(&self, model: &str, max_tokens: u64, messages: &[Message]) -> Result<()> { + let request_hash = Self::request_hash(model, max_tokens, messages); + + let serializable_messages: Vec = messages + .iter() + .map(|msg| SerializableMessage { + role: match msg.role { + Role::User => "user".to_string(), + Role::Assistant => "assistant".to_string(), + }, + content: message_content_to_string(&msg.content), + }) + .collect(); + + let serializable_request = SerializableRequest { + model: model.to_string(), + max_tokens, + messages: serializable_messages, + }; + + let request = Some(serde_json::to_string(&serializable_request)?); + let cache_row = CacheRow { + request_hash, + request, + response: None, + batch_id: None, + }; + self.connection.exec_bound(sql!( + INSERT OR IGNORE INTO cache(request_hash, request, response, batch_id) VALUES (?, ?, ?, ?)))?( + cache_row, + ) + } + + async fn generate( + &self, + model: &str, + max_tokens: u64, + messages: Vec, + ) -> Result> { + let response = self.lookup(model, max_tokens, &messages)?; + if let Some(response) = response { + return Ok(Some(response)); + } + + self.mark_for_batch(model, max_tokens, &messages)?; + + Ok(None) + } + + /// Uploads pending requests as a new batch; downloads finished batches if any. 
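+ /// Pending rows (no batch_id and no response) are bundled into a new batch; rows that already + /// have a batch_id are polled, and completed results are written back into the cache.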
+ async fn sync_batches(&self) -> Result<()> { + self.upload_pending_requests().await?; + self.download_finished_batches().await + } + + async fn download_finished_batches(&self) -> Result<()> { + let q = sql!(SELECT DISTINCT batch_id FROM cache WHERE batch_id IS NOT NULL AND response IS NULL); + let batch_ids: Vec = self.connection.select(q)?()?; + + for batch_id in batch_ids { + let batch_status = anthropic::batches::retrieve_batch( + self.http_client.as_ref(), + ANTHROPIC_API_URL, + &self.api_key, + &batch_id, + ) + .await + .map_err(|e| anyhow::anyhow!("{:?}", e))?; + + log::info!( + "Batch {} status: {}", + batch_id, + batch_status.processing_status + ); + + if batch_status.processing_status == "ended" { + let results = anthropic::batches::retrieve_batch_results( + self.http_client.as_ref(), + ANTHROPIC_API_URL, + &self.api_key, + &batch_id, + ) + .await + .map_err(|e| anyhow::anyhow!("{:?}", e))?; + + let mut success_count = 0; + for result in results { + let request_hash = result + .custom_id + .strip_prefix("req_hash_") + .unwrap_or(&result.custom_id) + .to_string(); + + match result.result { + anthropic::batches::BatchResult::Succeeded { message } => { + let response_json = serde_json::to_string(&message)?; + let q = sql!(UPDATE cache SET response = ? WHERE request_hash = ?); + self.connection.exec_bound(q)?((response_json, request_hash))?; + success_count += 1; + } + anthropic::batches::BatchResult::Errored { error } => { + log::error!("Batch request {} failed: {:?}", request_hash, error); + } + anthropic::batches::BatchResult::Canceled => { + log::warn!("Batch request {} was canceled", request_hash); + } + anthropic::batches::BatchResult::Expired => { + log::warn!("Batch request {} expired", request_hash); + } + } + } + log::info!("Downloaded {} successful requests", success_count); + } + } + + Ok(()) + } + + async fn upload_pending_requests(&self) -> Result { + let q = sql!( + SELECT request_hash, request FROM cache WHERE batch_id IS NULL AND response IS NULL + ); + + let rows: Vec<(String, String)> = self.connection.select(q)?()?; + + if rows.is_empty() { + return Ok(String::new()); + } + + let batch_requests = rows + .iter() + .map(|(hash, request_str)| { + let serializable_request: SerializableRequest = + serde_json::from_str(&request_str).unwrap(); + + let messages: Vec = serializable_request + .messages + .into_iter() + .map(|msg| Message { + role: match msg.role.as_str() { + "user" => Role::User, + "assistant" => Role::Assistant, + _ => Role::User, + }, + content: vec![RequestContent::Text { + text: msg.content, + cache_control: None, + }], + }) + .collect(); + + let params = AnthropicRequest { + model: serializable_request.model, + max_tokens: serializable_request.max_tokens, + messages, + tools: Vec::new(), + thinking: None, + tool_choice: None, + system: None, + metadata: None, + stop_sequences: Vec::new(), + temperature: None, + top_k: None, + top_p: None, + }; + + let custom_id = format!("req_hash_{}", hash); + anthropic::batches::BatchRequest { custom_id, params } + }) + .collect::>(); + + let batch_len = batch_requests.len(); + let batch = anthropic::batches::create_batch( + self.http_client.as_ref(), + ANTHROPIC_API_URL, + &self.api_key, + anthropic::batches::CreateBatchRequest { + requests: batch_requests, + }, + ) + .await + .map_err(|e| anyhow::anyhow!("{:?}", e))?; + + let q = sql!( + UPDATE cache SET batch_id = ? 
WHERE batch_id is NULL + ); + self.connection.exec_bound(q)?(batch.id.as_str())?; + + log::info!("Uploaded batch with {} requests", batch_len); + + Ok(batch.id) + } + + fn request_hash(model: &str, max_tokens: u64, messages: &[Message]) -> String { + let mut hasher = std::hash::DefaultHasher::new(); + model.hash(&mut hasher); + max_tokens.hash(&mut hasher); + for msg in messages { + message_content_to_string(&msg.content).hash(&mut hasher); + } + let request_hash = hasher.finish(); + format!("{request_hash:016x}") + } +} + +fn message_content_to_string(content: &[RequestContent]) -> String { + content + .iter() + .filter_map(|c| match c { + RequestContent::Text { text, .. } => Some(text.clone()), + _ => None, + }) + .collect::>() + .join("\n") +} + +pub enum AnthropicClient { + // No batching + Plain(PlainLlmClient), + Batch(BatchingLlmClient), + Dummy, +} + +impl AnthropicClient { + pub fn plain() -> Result { + Ok(Self::Plain(PlainLlmClient::new()?)) + } + + pub fn batch(cache_path: &Path) -> Result { + Ok(Self::Batch(BatchingLlmClient::new(cache_path)?)) + } + + #[allow(dead_code)] + pub fn dummy() -> Self { + Self::Dummy + } + + pub async fn generate( + &self, + model: &str, + max_tokens: u64, + messages: Vec, + ) -> Result> { + match self { + AnthropicClient::Plain(plain_llm_client) => plain_llm_client + .generate(model, max_tokens, messages) + .await + .map(Some), + AnthropicClient::Batch(batching_llm_client) => { + batching_llm_client + .generate(model, max_tokens, messages) + .await + } + AnthropicClient::Dummy => panic!("Dummy LLM client is not expected to be used"), + } + } + + pub async fn sync_batches(&self) -> Result<()> { + match self { + AnthropicClient::Plain(_) => Ok(()), + AnthropicClient::Batch(batching_llm_client) => batching_llm_client.sync_batches().await, + AnthropicClient::Dummy => panic!("Dummy LLM client is not expected to be used"), + } + } +} diff --git a/crates/edit_prediction_cli/src/distill.rs b/crates/edit_prediction_cli/src/distill.rs new file mode 100644 index 0000000000000000000000000000000000000000..abfe178ae61b6da522f43c93d40b6000800d0e4d --- /dev/null +++ b/crates/edit_prediction_cli/src/distill.rs @@ -0,0 +1,22 @@ +use anyhow::{Result, anyhow}; +use std::mem; + +use crate::example::Example; + +pub async fn run_distill(example: &mut Example) -> Result<()> { + let [prediction]: [_; 1] = + mem::take(&mut example.predictions) + .try_into() + .map_err(|preds: Vec<_>| { + anyhow!( + "Example has {} predictions, but it should have exactly one", + preds.len() + ) + })?; + + example.spec.expected_patch = prediction.actual_patch; + example.prompt = None; + example.predictions = Vec::new(); + example.score = Vec::new(); + Ok(()) +} diff --git a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs new file mode 100644 index 0000000000000000000000000000000000000000..e37619bf224b3fa506516714856cfbc5024ece14 --- /dev/null +++ b/crates/edit_prediction_cli/src/example.rs @@ -0,0 +1,250 @@ +use crate::{PredictionProvider, PromptFormat, metrics::ClassificationMetrics}; +use anyhow::{Context as _, Result}; +use collections::HashMap; +use edit_prediction::example_spec::ExampleSpec; +use edit_prediction::udiff::OpenedBuffers; +use gpui::Entity; +use http_client::Url; +use language::{Anchor, Buffer}; +use project::Project; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::{ + borrow::Cow, + io::{Read, Write}, + path::{Path, PathBuf}, +}; +use zeta_prompt::RelatedFile; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub 
struct Example { + #[serde(flatten)] + pub spec: ExampleSpec, + + /// The full content of the file where an edit is being predicted, and the + /// actual cursor offset. + #[serde(skip_serializing_if = "Option::is_none")] + pub buffer: Option, + + /// The context retrieved for the prediction. This requires the worktree to + /// be loaded and the language server to be started. + #[serde(skip_serializing_if = "Option::is_none")] + pub context: Option, + + /// The input and expected output from the edit prediction model. + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt: Option, + + /// The actual predictions from the model. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub predictions: Vec, + + /// The scores, for how well the actual predictions match the expected + /// predictions. + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub score: Vec, + + /// The application state used to process this example. + #[serde(skip)] + pub state: Option, +} + +#[derive(Clone, Debug)] +pub struct ExampleState { + pub project: Entity, + pub buffer: Entity, + pub cursor_position: Anchor, + pub _open_buffers: OpenedBuffers, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExampleContext { + pub files: Arc<[RelatedFile]>, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExampleBuffer { + pub content: String, + pub cursor_row: u32, + pub cursor_column: u32, + pub cursor_offset: usize, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExamplePrompt { + pub input: String, + pub expected_output: String, + pub format: PromptFormat, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExamplePrediction { + pub actual_patch: String, + pub actual_output: String, + pub provider: PredictionProvider, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExampleScore { + pub delta_chr_f: f32, + pub line_match: ClassificationMetrics, +} + +impl Example { + pub fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> { + // git@github.com:owner/repo.git + if self.spec.repository_url.contains('@') { + let (owner, repo) = self + .spec + .repository_url + .split_once(':') + .context("expected : in git url")? + .1 + .split_once('/') + .context("expected / in git url")?; + Ok(( + Cow::Borrowed(owner), + Cow::Borrowed(repo.trim_end_matches(".git")), + )) + // http://github.com/owner/repo.git + } else { + let url = Url::parse(&self.spec.repository_url)?; + let mut segments = url.path_segments().context("empty http url")?; + let owner = segments + .next() + .context("expected owner path segment")? + .to_string(); + let repo = segments + .next() + .context("expected repo path segment")? 
+ .trim_end_matches(".git") + .to_string(); + assert!(segments.next().is_none()); + + Ok((owner.into(), repo.into())) + } + } +} + +pub fn read_examples(inputs: &[PathBuf]) -> Vec { + let mut examples = Vec::new(); + + let stdin_path: PathBuf = PathBuf::from("-"); + + let inputs = if inputs.is_empty() { + &[stdin_path] + } else { + inputs + }; + + for path in inputs { + let is_stdin = path.as_path() == Path::new("-"); + let content = if is_stdin { + let mut buffer = String::new(); + std::io::stdin() + .read_to_string(&mut buffer) + .expect("Failed to read from stdin"); + buffer + } else { + std::fs::read_to_string(path) + .unwrap_or_else(|_| panic!("Failed to read path: {:?}", &path)) + }; + let filename = path.file_stem().unwrap().to_string_lossy().to_string(); + let ext = if !is_stdin { + path.extension() + .map(|ext| ext.to_string_lossy().to_string()) + .unwrap_or_else(|| panic!("{} should have an extension", path.display())) + } else { + "jsonl".to_string() + }; + + match ext.as_ref() { + "json" => { + let mut example = + serde_json::from_str::(&content).unwrap_or_else(|error| { + panic!("Failed to parse example file: {}\n{error}", path.display()) + }); + if example.spec.name.is_empty() { + example.spec.name = filename; + } + examples.push(example); + } + "jsonl" => examples.extend( + content + .lines() + .enumerate() + .map(|(line_ix, line)| { + let mut example = + serde_json::from_str::(line).unwrap_or_else(|error| { + panic!( + "Failed to parse example on {}:{}\n{error}", + path.display(), + line_ix + 1 + ) + }); + if example.spec.name.is_empty() { + example.spec.name = format!("{filename}-{line_ix}") + } + example + }) + .collect::>(), + ), + "md" => { + examples.push(parse_markdown_example(filename, &content).unwrap()); + } + ext => { + panic!("{} has invalid example extension `{ext}`", path.display()) + } + } + } + + sort_examples_by_repo_and_rev(&mut examples); + examples +} + +pub fn write_examples(examples: &[Example], output_path: Option<&PathBuf>) { + let mut content = String::new(); + for example in examples { + let line = serde_json::to_string(example).unwrap(); + content.push_str(&line); + content.push('\n'); + } + if let Some(output_path) = output_path { + std::fs::write(output_path, content).expect("Failed to write examples"); + } else { + std::io::stdout().write_all(&content.as_bytes()).unwrap(); + } +} + +pub fn sort_examples_by_repo_and_rev(examples: &mut [Example]) { + examples.sort_by(|a, b| { + a.spec + .repository_url + .cmp(&b.spec.repository_url) + .then(b.spec.revision.cmp(&a.spec.revision)) + }); +} + +pub fn group_examples_by_repo(examples: &mut [Example]) -> Vec> { + let mut examples_by_repo = HashMap::default(); + for example in examples.iter_mut() { + examples_by_repo + .entry(example.spec.repository_url.clone()) + .or_insert_with(Vec::new) + .push(example); + } + examples_by_repo.into_values().collect() +} + +fn parse_markdown_example(name: String, input: &str) -> Result { + let spec = ExampleSpec::from_markdown(name, input)?; + Ok(Example { + spec, + buffer: None, + context: None, + prompt: None, + predictions: Vec::new(), + score: Vec::new(), + state: None, + }) +} diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs new file mode 100644 index 0000000000000000000000000000000000000000..f543d0799b379403f0caa980df76954649e1aceb --- /dev/null +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -0,0 +1,288 @@ +use crate::{ + PromptFormat, + example::{Example, ExamplePrompt}, + 
headless::EpAppState, + load_project::run_load_project, + progress::{Progress, Step}, + retrieve_context::run_context_retrieval, +}; +use anyhow::{Context as _, Result, ensure}; +use edit_prediction::{ + EditPredictionStore, + zeta2::{zeta2_output_for_patch, zeta2_prompt_input}, +}; +use gpui::AsyncApp; +use std::sync::Arc; +use zeta_prompt::format_zeta_prompt; + +pub async fn run_format_prompt( + example: &mut Example, + prompt_format: PromptFormat, + app_state: Arc, + mut cx: AsyncApp, +) -> Result<()> { + run_context_retrieval(example, app_state.clone(), cx.clone()).await?; + + let _step_progress = Progress::global().start(Step::FormatPrompt, &example.spec.name); + + match prompt_format { + PromptFormat::Teacher => { + let prompt = TeacherPrompt::format_prompt(example); + example.prompt = Some(ExamplePrompt { + input: prompt, + expected_output: example.spec.expected_patch.clone(), // TODO + format: prompt_format, + }); + } + PromptFormat::Zeta2 => { + run_load_project(example, app_state, cx.clone()).await?; + + let ep_store = cx.update(|cx| { + EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") + })??; + + let state = example.state.as_ref().context("state must be set")?; + let snapshot = state.buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + let project = state.project.clone(); + let (_, input) = ep_store.update(&mut cx, |ep_store, cx| { + anyhow::Ok(zeta2_prompt_input( + &snapshot, + example + .context + .as_ref() + .context("context must be set")? + .files + .clone(), + ep_store.edit_history_for_project(&project, cx), + example.spec.cursor_path.clone(), + example + .buffer + .as_ref() + .context("buffer must be set")? + .cursor_offset, + )) + })??; + let prompt = format_zeta_prompt(&input); + let expected_output = + zeta2_output_for_patch(&input, &example.spec.expected_patch.clone())?; + example.prompt = Some(ExamplePrompt { + input: prompt, + expected_output, + format: prompt_format, + }); + } + }; + Ok(()) +} + +pub struct TeacherPrompt; + +impl TeacherPrompt { + const PROMPT: &str = include_str!("teacher.prompt.md"); + pub(crate) const EDITABLE_REGION_START: &str = "<|editable_region_start|>\n"; + pub(crate) const EDITABLE_REGION_END: &str = "<|editable_region_end|>"; + + /// Truncate edit history to this number of last lines + const MAX_HISTORY_LINES: usize = 128; + + pub fn format_prompt(example: &Example) -> String { + let edit_history = Self::format_edit_history(&example.spec.edit_history); + let context = Self::format_context(example); + let editable_region = Self::format_editable_region(example); + + let prompt = Self::PROMPT + .replace("{{context}}", &context) + .replace("{{edit_history}}", &edit_history) + .replace("{{editable_region}}", &editable_region); + + prompt + } + + pub fn parse(example: &Example, response: &str) -> Result { + // Ideally, we should always be able to find cursor position in the retrieved context. + // In reality, sometimes we don't find it for these reasons: + // 1. `example.cursor_position` contains _more_ context than included in the retrieved context + // (can be fixed by getting cursor coordinates at the load_example stage) + // 2. Context retriever just didn't include cursor line. + // + // In that case, fallback to using `cursor_position` as excerpt. + let cursor_file = &example + .buffer + .as_ref() + .context("`buffer` should be filled in in the context collection step")? 
+ .content; + + // Extract updated (new) editable region from the model response + let new_editable_region = extract_last_codeblock(response); + + // Reconstruct old editable region we sent to the model + let old_editable_region = Self::format_editable_region(example); + let old_editable_region = Self::extract_editable_region(&old_editable_region); + ensure!( + cursor_file.contains(&old_editable_region), + "Something's wrong: editable_region is not found in the cursor file" + ); + + // Apply editable region to a larger context and compute diff. + // This is needed to get a better context lines around the editable region + let edited_file = cursor_file.replace(&old_editable_region, &new_editable_region); + let diff = language::unified_diff(&cursor_file, &edited_file); + + let diff = indoc::formatdoc! {" + --- a/{path} + +++ b/{path} + {diff}", + path = example.spec.cursor_path.to_string_lossy(), + diff = diff, + }; + + Ok(diff) + } + + fn format_edit_history(edit_history: &str) -> String { + // Strip comments ("garbage lines") from edit history + let lines = edit_history + .lines() + .filter(|&s| Self::is_udiff_content_line(s)) + .collect::>(); + + let history_lines = if lines.len() > Self::MAX_HISTORY_LINES { + &lines[lines.len() - Self::MAX_HISTORY_LINES..] + } else { + &lines + }; + + if history_lines.is_empty() { + return "(No edit history)".to_string(); + } + + history_lines.join("\n") + } + + fn format_context(example: &Example) -> String { + assert!(example.context.is_some(), "Missing context retriever step"); + + let mut prompt = String::new(); + zeta_prompt::write_related_files(&mut prompt, &example.context.as_ref().unwrap().files); + + prompt + } + + fn format_editable_region(example: &Example) -> String { + let mut result = String::new(); + + let path_str = example.spec.cursor_path.to_string_lossy(); + result.push_str(&format!("`````path=\"{path_str}\"\n")); + result.push_str(Self::EDITABLE_REGION_START); + + // TODO: control number of lines around cursor + result.push_str(&example.spec.cursor_position); + if !example.spec.cursor_position.ends_with('\n') { + result.push('\n'); + } + + result.push_str(&format!("{}\n", Self::EDITABLE_REGION_END)); + result.push_str("`````"); + + result + } + + fn extract_editable_region(text: &str) -> String { + let start = text + .find(Self::EDITABLE_REGION_START) + .map_or(0, |pos| pos + Self::EDITABLE_REGION_START.len()); + let end = text.find(Self::EDITABLE_REGION_END).unwrap_or(text.len()); + + let region = &text[start..end]; + + region.replace("<|user_cursor|>", "") + } + + fn is_udiff_content_line(s: &str) -> bool { + s.starts_with("-") + || s.starts_with("+") + || s.starts_with(" ") + || s.starts_with("---") + || s.starts_with("+++") + || s.starts_with("@@") + } +} + +fn extract_last_codeblock(text: &str) -> String { + let mut last_block = None; + let mut search_start = 0; + + while let Some(start) = text[search_start..].find("```") { + let start = start + search_start; + let bytes = text.as_bytes(); + let mut backtick_end = start; + + while backtick_end < bytes.len() && bytes[backtick_end] == b'`' { + backtick_end += 1; + } + + let backtick_count = backtick_end - start; + let closing_backticks = "`".repeat(backtick_count); + + while backtick_end < bytes.len() && bytes[backtick_end] != b'\n' { + backtick_end += 1; + } + + if let Some(end_pos) = text[backtick_end..].find(&closing_backticks) { + let code_block = &text[backtick_end + 1..backtick_end + end_pos]; + last_block = Some(code_block.to_string()); + search_start = backtick_end + 
end_pos + backtick_count; + } else { + break; + } + } + + last_block.unwrap_or_else(|| text.to_string()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_extract_last_code_block() { + let text = indoc::indoc! {" + Some thinking + + ``` + first block + ``` + + `````path='something' lines=1:2 + last block + ````` + "}; + let last_block = extract_last_codeblock(text); + assert_eq!(last_block, "last block\n"); + } + + #[test] + fn test_extract_editable_region() { + let text = indoc::indoc! {" + some lines + are + here + <|editable_region_start|> + one + two three + + <|editable_region_end|> + more + lines here + "}; + let parsed = TeacherPrompt::extract_editable_region(text); + assert_eq!( + parsed, + indoc::indoc! {" + one + two three + + "} + ); + } +} diff --git a/crates/zeta_cli/src/headless.rs b/crates/edit_prediction_cli/src/headless.rs similarity index 77% rename from crates/zeta_cli/src/headless.rs rename to crates/edit_prediction_cli/src/headless.rs index 35fbf1313e21bac33f8dfb5325fae365ae133aee..da96e7ef6520e952e2b7696eee6b82c243e90e4e 100644 --- a/crates/zeta_cli/src/headless.rs +++ b/crates/edit_prediction_cli/src/headless.rs @@ -1,4 +1,5 @@ use client::{Client, ProxySettings, UserStore}; +use collections::HashMap; use extension::ExtensionHostProxy; use fs::RealFs; use gpui::http_client::read_proxy_from_env; @@ -7,27 +8,46 @@ use gpui_tokio::Tokio; use language::LanguageRegistry; use language_extension::LspAccess; use node_runtime::{NodeBinaryOptions, NodeRuntime}; -use project::project_settings::ProjectSettings; -use release_channel::AppVersion; +use project::{Project, project_settings::ProjectSettings}; +use release_channel::{AppCommitSha, AppVersion}; use reqwest_client::ReqwestClient; use settings::{Settings, SettingsStore}; use std::path::PathBuf; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use util::ResultExt as _; /// Headless subset of `workspace::AppState`. 
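+/// Holds the long-lived services (language registry, client, user store, fs, node runtime) plus a +/// cache of loaded projects keyed by repository URL, so examples from the same repository can +/// reuse a worktree instead of opening a fresh project each time.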
-pub struct ZetaCliAppState { +pub struct EpAppState { pub languages: Arc, pub client: Arc, pub user_store: Entity, pub fs: Arc, pub node_runtime: NodeRuntime, + pub project_cache: ProjectCache, } -// TODO: dedupe with crates/eval/src/eval.rs -pub fn init(cx: &mut App) -> ZetaCliAppState { - let app_version = AppVersion::load(env!("ZED_PKG_VERSION")); - release_channel::init(app_version, cx); +#[derive(Default)] +pub struct ProjectCache(Mutex>>); + +impl ProjectCache { + pub fn insert(&self, repository_url: String, project: Entity) { + self.0.lock().unwrap().insert(repository_url, project); + } + + pub fn get(&self, repository_url: &String) -> Option> { + self.0.lock().unwrap().get(repository_url).cloned() + } +} + +pub fn init(cx: &mut App) -> EpAppState { + let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned())); + + let app_version = AppVersion::load( + env!("ZED_PKG_VERSION"), + option_env!("ZED_BUILD_ID"), + app_commit_sha, + ); + release_channel::init(app_version.clone(), cx); gpui_tokio::init(cx); let settings_store = SettingsStore::new(cx, &settings::default_settings()); @@ -106,11 +126,14 @@ pub fn init(cx: &mut App) -> ZetaCliAppState { prompt_store::init(cx); terminal_view::init(cx); - ZetaCliAppState { + let project_cache = ProjectCache::default(); + + EpAppState { languages, client, user_store, fs, node_runtime, + project_cache, } } diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs new file mode 100644 index 0000000000000000000000000000000000000000..70daf00b79486fd917556cffaa26b1fd01ed4d28 --- /dev/null +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -0,0 +1,357 @@ +use crate::{ + example::{Example, ExampleBuffer, ExampleState}, + headless::EpAppState, + paths::{REPOS_DIR, WORKTREES_DIR}, + progress::{InfoStyle, Progress, Step, StepProgress}, +}; +use anyhow::{Context as _, Result}; +use collections::HashMap; +use edit_prediction::EditPredictionStore; +use edit_prediction::udiff::OpenedBuffers; +use futures::{ + AsyncWriteExt as _, + lock::{Mutex, OwnedMutexGuard}, +}; +use gpui::{AsyncApp, Entity}; +use language::{Anchor, Buffer, LanguageNotFound, ToOffset, ToPoint}; +use project::buffer_store::BufferStoreEvent; +use project::{Project, ProjectPath}; +use std::{ + cell::RefCell, + fs, + path::{Path, PathBuf}, + sync::Arc, +}; +use util::{paths::PathStyle, rel_path::RelPath}; +use zeta_prompt::CURSOR_MARKER; + +pub async fn run_load_project( + example: &mut Example, + app_state: Arc, + mut cx: AsyncApp, +) -> Result<()> { + if example.state.is_some() { + return Ok(()); + } + + let progress = Progress::global().start(Step::LoadProject, &example.spec.name); + + let project = setup_project(example, &app_state, &progress, &mut cx).await?; + + let _open_buffers = apply_edit_history(example, &project, &mut cx).await?; + + let (buffer, cursor_position) = cursor_position(example, &project, &mut cx).await?; + let (example_buffer, language_name) = buffer.read_with(&cx, |buffer, _cx| { + let cursor_point = cursor_position.to_point(&buffer); + let language_name = buffer + .language() + .map(|l| l.name().to_string()) + .unwrap_or_else(|| "Unknown".to_string()); + ( + ExampleBuffer { + content: buffer.text(), + cursor_row: cursor_point.row, + cursor_column: cursor_point.column, + cursor_offset: cursor_position.to_offset(&buffer), + }, + language_name, + ) + })?; + + progress.set_info(language_name, InfoStyle::Normal); + + example.buffer = Some(example_buffer); + example.state = 
Some(ExampleState { + buffer, + project, + cursor_position, + _open_buffers, + }); + Ok(()) +} + +async fn cursor_position( + example: &Example, + project: &Entity, + cx: &mut AsyncApp, +) -> Result<(Entity, Anchor)> { + let language_registry = project.read_with(cx, |project, _| project.languages().clone())?; + let result = language_registry + .load_language_for_file_path(&example.spec.cursor_path) + .await; + + if let Err(error) = result + && !error.is::() + { + return Err(error); + } + + let worktree = project.read_with(cx, |project, cx| { + project + .visible_worktrees(cx) + .next() + .context("No visible worktrees") + })??; + + let cursor_path = RelPath::new(&example.spec.cursor_path, PathStyle::Posix) + .context("Failed to create RelPath")? + .into_arc(); + let cursor_buffer = project + .update(cx, |project, cx| { + project.open_buffer( + ProjectPath { + worktree_id: worktree.read(cx).id(), + path: cursor_path, + }, + cx, + ) + })? + .await?; + let cursor_offset_within_excerpt = example + .spec + .cursor_position + .find(CURSOR_MARKER) + .context("missing cursor marker")?; + let mut cursor_excerpt = example.spec.cursor_position.clone(); + cursor_excerpt.replace_range( + cursor_offset_within_excerpt..(cursor_offset_within_excerpt + CURSOR_MARKER.len()), + "", + ); + let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| { + let text = buffer.text(); + + let mut matches = text.match_indices(&cursor_excerpt); + let (excerpt_offset, _) = matches.next().with_context(|| { + format!( + "\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\n.Example: {}\nCursor excerpt did not exist in buffer.", + example.spec.name + ) + })?; + anyhow::ensure!( + matches.next().is_none(), + "More than one cursor position match found for {}", + &example.spec.name + ); + Ok(excerpt_offset) + })??; + + let cursor_offset = excerpt_offset + cursor_offset_within_excerpt; + let cursor_anchor = + cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?; + + Ok((cursor_buffer, cursor_anchor)) +} + +async fn setup_project( + example: &mut Example, + app_state: &Arc, + step_progress: &StepProgress, + cx: &mut AsyncApp, +) -> Result> { + let ep_store = cx + .update(|cx| EditPredictionStore::try_global(cx))? + .context("Store should be initialized at init")?; + + let worktree_path = setup_worktree(example, step_progress).await?; + + if let Some(project) = app_state.project_cache.get(&example.spec.repository_url) { + ep_store.update(cx, |ep_store, _| { + ep_store.clear_history_for_project(&project); + })?; + let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?; + let buffers = buffer_store.read_with(cx, |buffer_store, _| { + buffer_store.buffers().collect::>() + })?; + for buffer in buffers { + buffer + .update(cx, |buffer, cx| buffer.reload(cx))? + .await + .ok(); + } + return Ok(project); + } + + let project = cx.update(|cx| { + Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + false, + cx, + ) + })?; + + project + .update(cx, |project, cx| { + project.disable_worktree_scanner(cx); + project.create_worktree(&worktree_path, true, cx) + })? 
+ .await?; + + app_state + .project_cache + .insert(example.spec.repository_url.clone(), project.clone()); + + let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?; + cx.subscribe(&buffer_store, { + let project = project.clone(); + move |_, event, cx| match event { + BufferStoreEvent::BufferAdded(buffer) => { + ep_store.update(cx, |store, cx| store.register_buffer(&buffer, &project, cx)); + } + _ => {} + } + })? + .detach(); + + Ok(project) +} + +async fn setup_worktree(example: &Example, step_progress: &StepProgress) -> Result { + let (repo_owner, repo_name) = example.repo_name().context("failed to get repo name")?; + let repo_dir = REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref()); + let worktree_path = WORKTREES_DIR + .join(repo_owner.as_ref()) + .join(repo_name.as_ref()); + let repo_lock = lock_repo(&repo_dir).await; + + if !repo_dir.is_dir() { + step_progress.set_substatus(format!("cloning {}", repo_name)); + fs::create_dir_all(&repo_dir)?; + run_git(&repo_dir, &["init"]).await?; + run_git( + &repo_dir, + &["remote", "add", "origin", &example.spec.repository_url], + ) + .await?; + } + + // Resolve the example to a revision, fetching it if needed. + let revision = run_git( + &repo_dir, + &[ + "rev-parse", + &format!("{}^{{commit}}", example.spec.revision), + ], + ) + .await; + let revision = if let Ok(revision) = revision { + revision + } else { + step_progress.set_substatus("fetching"); + if run_git( + &repo_dir, + &["fetch", "--depth", "1", "origin", &example.spec.revision], + ) + .await + .is_err() + { + run_git(&repo_dir, &["fetch", "origin"]).await?; + } + let revision = run_git(&repo_dir, &["rev-parse", "FETCH_HEAD"]).await?; + revision + }; + + // Create the worktree for this example if needed. + step_progress.set_substatus("preparing worktree"); + if worktree_path.is_dir() { + run_git(&worktree_path, &["clean", "--force", "-d"]).await?; + run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?; + run_git(&worktree_path, &["checkout", revision.as_str()]).await?; + } else { + let worktree_path_string = worktree_path.to_string_lossy(); + run_git( + &repo_dir, + &["branch", "-f", &example.spec.name, revision.as_str()], + ) + .await?; + run_git( + &repo_dir, + &[ + "worktree", + "add", + "-f", + &worktree_path_string, + &example.spec.name, + ], + ) + .await?; + } + drop(repo_lock); + + // Apply the uncommitted diff for this example. + if !example.spec.uncommitted_diff.is_empty() { + step_progress.set_substatus("applying diff"); + let mut apply_process = smol::process::Command::new("git") + .current_dir(&worktree_path) + .args(&["apply", "-"]) + .stdin(std::process::Stdio::piped()) + .spawn()?; + + let mut stdin = apply_process.stdin.take().context("Failed to get stdin")?; + stdin + .write_all(example.spec.uncommitted_diff.as_bytes()) + .await?; + stdin.close().await?; + drop(stdin); + + let apply_result = apply_process.output().await?; + anyhow::ensure!( + apply_result.status.success(), + "Failed to apply uncommitted diff patch with status: {}\nstderr:\n{}\nstdout:\n{}", + apply_result.status, + String::from_utf8_lossy(&apply_result.stderr), + String::from_utf8_lossy(&apply_result.stdout), + ); + } + + step_progress.clear_substatus(); + Ok(worktree_path) +} + +async fn apply_edit_history( + example: &Example, + project: &Entity, + cx: &mut AsyncApp, +) -> Result { + edit_prediction::udiff::apply_diff(&example.spec.edit_history, project, cx).await +} + +thread_local! 
{ + static REPO_LOCKS: RefCell>>> = RefCell::new(HashMap::default()); +} + +#[must_use] +pub async fn lock_repo(path: impl AsRef) -> OwnedMutexGuard<()> { + REPO_LOCKS + .with(|cell| { + cell.borrow_mut() + .entry(path.as_ref().to_path_buf()) + .or_default() + .clone() + }) + .lock_owned() + .await +} + +async fn run_git(repo_path: &Path, args: &[&str]) -> Result { + let output = smol::process::Command::new("git") + .current_dir(repo_path) + .args(args) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}", + args.join(" "), + repo_path.display(), + output.status, + String::from_utf8_lossy(&output.stderr), + String::from_utf8_lossy(&output.stdout), + ); + Ok(String::from_utf8(output.stdout)?.trim().to_string()) +} diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..dce0fbbed57dbc4b18faf93787cfb8f2341a126a --- /dev/null +++ b/crates/edit_prediction_cli/src/main.rs @@ -0,0 +1,343 @@ +mod anthropic_client; +mod distill; +mod example; +mod format_prompt; +mod headless; +mod load_project; +mod metrics; +mod paths; +mod predict; +mod progress; +mod retrieve_context; +mod score; + +use clap::{Args, CommandFactory, Parser, Subcommand, ValueEnum}; +use edit_prediction::EditPredictionStore; +use gpui::Application; +use reqwest_client::ReqwestClient; +use serde::{Deserialize, Serialize}; +use std::fmt::Display; +use std::{path::PathBuf, sync::Arc}; + +use crate::distill::run_distill; +use crate::example::{group_examples_by_repo, read_examples, write_examples}; +use crate::format_prompt::run_format_prompt; +use crate::load_project::run_load_project; +use crate::paths::FAILED_EXAMPLES_DIR; +use crate::predict::run_prediction; +use crate::progress::Progress; +use crate::retrieve_context::run_context_retrieval; +use crate::score::run_scoring; + +#[derive(Parser, Debug)] +#[command(name = "ep")] +struct EpArgs { + #[arg(long, default_value_t = false)] + printenv: bool, + #[clap(long, default_value_t = 10, global = true)] + max_parallelism: usize, + #[command(subcommand)] + command: Option, + #[clap(global = true)] + inputs: Vec, + #[arg(long, short, global = true)] + output: Option, + #[arg(long, short, global = true)] + in_place: bool, + #[arg(long, short, global = true)] + failfast: bool, +} + +#[derive(Subcommand, Debug)] +enum Command { + /// Parse markdown examples and output a combined .jsonl file + ParseExample, + /// Create git worktrees for each example and load file contents + LoadProject, + /// Retrieve context for input examples. + Context, + /// Generate a prompt string for a specific model + FormatPrompt(FormatPromptArgs), + /// Runs edit prediction + Predict(PredictArgs), + /// Computes a score based on actual and expected patches + Score(PredictArgs), + /// Prepares a distillation dataset by copying expected outputs to + /// predicted outputs and removing actual outputs and prompts. 
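+ /// (Each example must have exactly one prediction; its patch becomes the expected patch.)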
+ Distill, + /// Print aggregated scores + Eval(PredictArgs), + /// Remove git repositories and worktrees + Clean, +} + +impl Display for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Command::ParseExample => write!(f, "parse-example"), + Command::LoadProject => write!(f, "load-project"), + Command::Context => write!(f, "context"), + Command::FormatPrompt(format_prompt_args) => write!( + f, + "format-prompt --prompt-format={}", + format_prompt_args + .prompt_format + .to_possible_value() + .unwrap() + .get_name() + ), + Command::Predict(predict_args) => { + write!( + f, + "predict --provider={:?}", + predict_args + .provider + .to_possible_value() + .unwrap() + .get_name() + ) + } + Command::Score(predict_args) => { + write!( + f, + "score --provider={:?}", + predict_args + .provider + .to_possible_value() + .unwrap() + .get_name() + ) + } + Command::Distill => write!(f, "distill"), + Command::Eval(predict_args) => write!( + f, + "eval --provider={:?}", + predict_args + .provider + .to_possible_value() + .unwrap() + .get_name() + ), + Command::Clean => write!(f, "clean"), + } + } +} + +#[derive(Debug, Args)] +struct FormatPromptArgs { + #[clap(long)] + prompt_format: PromptFormat, +} + +#[derive(Clone, Copy, Debug, ValueEnum, Serialize, Deserialize)] +enum PromptFormat { + Teacher, + Zeta2, +} + +#[derive(Debug, Args)] +struct PredictArgs { + #[clap(long)] + provider: PredictionProvider, + #[clap(long, default_value_t = 1)] + repetitions: usize, +} + +#[derive(Clone, Copy, Debug, ValueEnum, Serialize, Deserialize)] +enum PredictionProvider { + Sweep, + Mercury, + Zeta1, + Zeta2, + Teacher, + TeacherNonBatching, +} + +impl EpArgs { + fn output_path(&self) -> Option { + if self.in_place { + if self.inputs.len() == 1 { + self.inputs.first().cloned() + } else { + panic!("--in-place requires exactly one input file") + } + } else { + self.output.clone() + } + } +} + +fn main() { + let args = EpArgs::parse(); + + if args.printenv { + ::util::shell_env::print_env(); + return; + } + + let output = args.output_path(); + let command = match args.command { + Some(cmd) => cmd, + None => { + EpArgs::command().print_help().unwrap(); + return; + } + }; + + match &command { + Command::Clean => { + std::fs::remove_dir_all(&*paths::DATA_DIR).unwrap(); + return; + } + _ => {} + } + + let mut examples = read_examples(&args.inputs); + let http_client = Arc::new(ReqwestClient::new()); + let app = Application::headless().with_http_client(http_client); + + app.run(move |cx| { + let app_state = Arc::new(headless::init(cx)); + EditPredictionStore::global(&app_state.client, &app_state.user_store, cx); + + cx.spawn(async move |cx| { + let result = async { + if let Command::Predict(args) = &command { + predict::sync_batches(&args.provider).await?; + } + + let total_examples = examples.len(); + Progress::global().set_total_examples(total_examples); + + let mut grouped_examples = group_examples_by_repo(&mut examples); + let example_batches = grouped_examples.chunks_mut(args.max_parallelism); + + for example_batch in example_batches { + let futures = example_batch.into_iter().map(|repo_examples| async { + for example in repo_examples.iter_mut() { + let result = async { + match &command { + Command::ParseExample => {} + Command::LoadProject => { + run_load_project(example, app_state.clone(), cx.clone()) + .await?; + } + Command::Context => { + run_context_retrieval( + example, + app_state.clone(), + cx.clone(), + ) + .await?; + } + Command::FormatPrompt(args) => { + 
run_format_prompt( + example, + args.prompt_format, + app_state.clone(), + cx.clone(), + ) + .await?; + } + Command::Predict(args) => { + run_prediction( + example, + Some(args.provider), + args.repetitions, + app_state.clone(), + cx.clone(), + ) + .await?; + } + Command::Distill => { + run_distill(example).await?; + } + Command::Score(args) | Command::Eval(args) => { + run_scoring(example, &args, app_state.clone(), cx.clone()) + .await?; + } + Command::Clean => { + unreachable!() + } + } + anyhow::Ok(()) + } + .await; + + if let Err(e) = result { + Progress::global().increment_failed(); + let failed_example_path = + FAILED_EXAMPLES_DIR.join(format!("{}.json", example.spec.name)); + app_state + .fs + .write( + &failed_example_path, + &serde_json::to_vec_pretty(&example).unwrap(), + ) + .await + .unwrap(); + let err_path = FAILED_EXAMPLES_DIR + .join(format!("{}_err.txt", example.spec.name)); + app_state + .fs + .write(&err_path, e.to_string().as_bytes()) + .await + .unwrap(); + + let msg = format!( + indoc::indoc! {" + While processing {}: + + {:?} + + Written to: \x1b[36m{}\x1b[0m + + Explore this example data with: + fx \x1b[36m{}\x1b[0m + + Re-run this example with: + cargo run -p edit_prediction_cli -- {} \x1b[36m{}\x1b[0m + "}, + example.spec.name, + e, + err_path.display(), + failed_example_path.display(), + command, + failed_example_path.display(), + ); + if args.failfast || total_examples == 1 { + Progress::global().finalize(); + panic!("{}", msg); + } else { + log::error!("{}", msg); + } + } + } + }); + futures::future::join_all(futures).await; + } + Progress::global().finalize(); + + if args.output.is_some() || !matches!(command, Command::Eval(_)) { + write_examples(&examples, output.as_ref()); + } + + match &command { + Command::Predict(args) => predict::sync_batches(&args.provider).await?, + Command::Eval(_) => score::print_report(&examples), + _ => (), + }; + + anyhow::Ok(()) + } + .await; + + if let Err(e) = result { + panic!("Fatal error: {:?}", e); + } + + let _ = cx.update(|cx| cx.quit()); + }) + .detach(); + }); +} diff --git a/crates/edit_prediction_cli/src/metrics.rs b/crates/edit_prediction_cli/src/metrics.rs new file mode 100644 index 0000000000000000000000000000000000000000..b3e5eb8688724c821953a56c4fe82e67c75e13b6 --- /dev/null +++ b/crates/edit_prediction_cli/src/metrics.rs @@ -0,0 +1,371 @@ +use collections::{HashMap, HashSet}; +use edit_prediction::udiff::DiffLine; +use serde::{Deserialize, Serialize}; + +type Counts = HashMap; +type CountsDelta = HashMap; + +#[derive(Default, Debug, Clone, Serialize, Deserialize)] +pub struct ClassificationMetrics { + pub true_positives: usize, + pub false_positives: usize, + pub false_negatives: usize, +} + +impl ClassificationMetrics { + pub fn from_sets( + expected: &HashSet, + actual: &HashSet, + ) -> ClassificationMetrics { + let true_positives = expected.intersection(actual).count(); + let false_positives = actual.difference(expected).count(); + let false_negatives = expected.difference(actual).count(); + + ClassificationMetrics { + true_positives, + false_positives, + false_negatives, + } + } + + pub fn from_counts(expected: &Counts, actual: &Counts) -> ClassificationMetrics { + let mut true_positives = 0; + let mut false_positives = 0; + let mut false_negatives = 0; + + for (ngram, &expected_count) in expected { + let actual_count = *actual.get(ngram).unwrap_or(&0); + if actual_count > expected_count { + false_positives += actual_count - expected_count; + } else { + false_negatives += expected_count - actual_count; + } + 
true_positives += expected_count.min(actual_count); + } + + for (ngram, &actual_count) in actual { + if !expected.contains_key(ngram) { + false_positives += actual_count; + } + } + + ClassificationMetrics { + true_positives, + false_positives, + false_negatives, + } + } + + pub fn aggregate<'a>( + scores: impl Iterator, + ) -> ClassificationMetrics { + let mut true_positives = 0; + let mut false_positives = 0; + let mut false_negatives = 0; + + for score in scores { + true_positives += score.true_positives; + false_positives += score.false_positives; + false_negatives += score.false_negatives; + } + + ClassificationMetrics { + true_positives, + false_positives, + false_negatives, + } + } + + pub fn precision(&self) -> f64 { + if self.true_positives + self.false_positives == 0 { + 0.0 + } else { + self.true_positives as f64 / (self.true_positives + self.false_positives) as f64 + } + } + + pub fn recall(&self) -> f64 { + if self.true_positives + self.false_negatives == 0 { + 0.0 + } else { + self.true_positives as f64 / (self.true_positives + self.false_negatives) as f64 + } + } + + pub fn f1_score(&self) -> f64 { + let recall = self.recall(); + let precision = self.precision(); + if precision + recall == 0.0 { + 0.0 + } else { + 2.0 * precision * recall / (precision + recall) + } + } +} + +pub fn line_match_score( + expected_patch: &[DiffLine], + actual_patch: &[DiffLine], +) -> ClassificationMetrics { + let expected_change_lines = expected_patch + .iter() + .filter(|line| matches!(line, DiffLine::Addition(_) | DiffLine::Deletion(_))) + .map(|line| line.to_string()) + .collect(); + + let actual_change_lines = actual_patch + .iter() + .filter(|line| matches!(line, DiffLine::Addition(_) | DiffLine::Deletion(_))) + .map(|line| line.to_string()) + .collect(); + + ClassificationMetrics::from_sets(&expected_change_lines, &actual_change_lines) +} + +enum ChrfWhitespace { + #[allow(unused)] + Unchanged, + Ignore, +} + +const CHR_F_CHAR_ORDER: usize = 6; +const CHR_F_BETA: f64 = 2.0; +const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Ignore; + +/// Computes a delta-chrF score that compares two sets of edits. +/// +/// This metric works by: +/// 1. Reconstructing original, golden (expected result), and actual texts from diffs +/// 2. Computing n-gram count differences (deltas) between original→golden and original→actual +/// 3. 
Comparing these deltas to measure how well actual edits match expected edits +pub fn delta_chr_f(expected: &[DiffLine], actual: &[DiffLine]) -> f64 { + // Reconstruct texts from diffs + let mut original_text = String::new(); // state of the text before any edits + let mut golden_text = String::new(); // text after applying golden edits + let mut actual_text = String::new(); // text after applying actual edits + + for line in expected { + match line { + DiffLine::Context(s) => { + original_text.push_str(s); + golden_text.push_str(s); + } + DiffLine::Deletion(s) => { + original_text.push_str(s); + } + DiffLine::Addition(s) => { + golden_text.push_str(s); + } + _ => {} + } + } + + for line in actual { + match line { + DiffLine::Context(s) | DiffLine::Addition(s) => { + actual_text.push_str(s); + } + _ => {} + } + } + + // Edge case + if original_text == golden_text && golden_text == actual_text { + return 100.0; + } + + // Compute the metric + let original_ngrams = chr_f_ngram_counts(&original_text); + let golden_ngrams = chr_f_ngram_counts(&golden_text); + let actual_ngrams = chr_f_ngram_counts(&actual_text); + + let mut total_precision = 0.0; + let mut total_recall = 0.0; + + for order in 0..CHR_F_CHAR_ORDER { + let expected_delta = compute_ngram_delta(&golden_ngrams[order], &original_ngrams[order]); + let actual_delta = compute_ngram_delta(&actual_ngrams[order], &original_ngrams[order]); + + if expected_delta.is_empty() && actual_delta.is_empty() { + total_precision += 1.0; + total_recall += 1.0; + continue; + } + + let expected_counts = ngram_delta_to_counts(&expected_delta); + let actual_counts = ngram_delta_to_counts(&actual_delta); + + let score = ClassificationMetrics::from_counts(&expected_counts, &actual_counts); + total_precision += score.precision(); + total_recall += score.recall(); + } + + let prec = total_precision / CHR_F_CHAR_ORDER as f64; + let recall = total_recall / CHR_F_CHAR_ORDER as f64; + let f_score = if prec + recall == 0.0 { + 0.0 + } else { + (1.0 + CHR_F_BETA * CHR_F_BETA) * prec * recall / (CHR_F_BETA * CHR_F_BETA * prec + recall) + }; + + f_score * 100.0 +} + +fn chr_f_ngram_counts(text: &str) -> Vec { + // Ignore whitespace. The original chrF implementation skips all + // whitespace. We should consider compressing multiple consecutive + // spaces into one -- this may reflect our task more closely. + let text = match CHR_F_WHITESPACE { + ChrfWhitespace::Unchanged => text.to_string(), + ChrfWhitespace::Ignore => text + .chars() + .filter(|c| !c.is_whitespace()) + .collect::(), + }; + + (1..=CHR_F_CHAR_ORDER) + .map(|order| count_ngrams(&text, order)) + .collect() +} + +fn compute_ngram_delta(after: &Counts, before: &Counts) -> CountsDelta { + let mut delta = CountsDelta::default(); + + for (ngram, &before_count) in before { + let after_count = *after.get(ngram).unwrap_or(&0); + delta.insert(ngram.clone(), after_count as isize - before_count as isize); + } + + for (ngram, &after_count) in after { + if !before.contains_key(ngram) { + delta.insert(ngram.clone(), after_count as isize); + } + } + + delta +} + +/// Convert negative counts to special deletion tokens. +/// For example, if expected delta is {"foo": -1} and actual delta is {"bar": -1}, +/// we convert it to {"¬foo": +1} and {"¬bar": +1}. This way _not_ deleting "foo" +/// will result in a false negative, and mistakenly deleting "bar" will result in a false positive. 
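+///
+/// Illustrative walk-through (counts are made up): with an expected delta of
+/// `{"foo": -1}` and an actual delta of `{}` (the prediction made no edit),
+/// the expected counts become `{"¬foo": 1}` while the actual counts stay
+/// empty, so `ClassificationMetrics::from_counts` reports the missed deletion
+/// as a false negative.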
+fn ngram_delta_to_counts(delta: &CountsDelta) -> Counts { + let mut counts = Counts::default(); + + for (ngram, &delta) in delta { + if delta > 0 { + counts.insert(ngram.clone(), delta as usize); + } else { + counts.insert(format!("¬{ngram}"), delta.unsigned_abs()); + } + } + + counts +} + +fn count_ngrams(text: &str, n: usize) -> Counts { + let chars: Vec = text.chars().collect(); + let mut counts = Counts::default(); + + for window in chars.windows(n) { + let ngram: String = window.iter().collect(); + *counts.entry(ngram).or_insert(0) += 1; + } + + counts +} + +#[cfg(test)] +mod test { + use super::*; + use edit_prediction::udiff::DiffLine; + + #[test] + fn test_delta_chr_f_perfect_match() { + let diff = vec![ + DiffLine::Context("fn main() {"), + DiffLine::Deletion(" println!(\"Hello\");"), + DiffLine::Addition(" println!(\"Hello, World!\");"), + DiffLine::Context("}"), + ]; + + let score = delta_chr_f(&diff, &diff); + assert!((score - 100.0).abs() < 1e-2); + } + + #[test] + fn test_delta_chr_f_wrong_edit() { + // When the edit is wrong + let expected = vec![ + DiffLine::Context("one "), + DiffLine::Deletion("two "), + DiffLine::Context("three"), + ]; + + let actual = vec![ + DiffLine::Context("one "), + DiffLine::Context("two "), + DiffLine::Deletion("three"), + DiffLine::Addition("four"), + ]; + + // Then the score should be low + let score = delta_chr_f(&expected, &actual); + assert!(score > 20.0 && score < 40.0); + } + + #[test] + fn test_delta_chr_f_partial_match() { + let expected = vec![ + DiffLine::Deletion("let x = 42;"), + DiffLine::Addition("let x = 100;"), + ]; + + let actual = vec![ + DiffLine::Deletion("let x = 42;"), + DiffLine::Addition("let x = 99;"), + ]; + + // We got the edit location right, but the replacement text is wrong. + // Deleted ngrams will match, bringing the score somewhere in the middle. 
+ let score = delta_chr_f(&expected, &actual); + assert!(score > 40.0 && score < 60.0); + } + + #[test] + fn test_delta_chr_f_missed_edit() { + // When predictions makes no changes + let expected = vec![ + DiffLine::Context("prefix "), + DiffLine::Deletion("old"), + DiffLine::Addition("new"), + DiffLine::Context(" suffix"), + ]; + + let actual = vec![ + DiffLine::Context("prefix "), + DiffLine::Context("old"), + DiffLine::Context(" suffix"), + ]; + + // Then the score should be low (all expected changes are false negatives) + let score = delta_chr_f(&expected, &actual); + assert!(score < 20.0); + } + + #[test] + fn test_delta_chr_f_extra_edit() { + // When adding unexpected content + let expected = vec![DiffLine::Context("hello"), DiffLine::Context("world")]; + + let actual = vec![ + DiffLine::Context("hello"), + DiffLine::Addition("extra"), + DiffLine::Context("world"), + ]; + + // Then the score should be low (all actual changes are false positives) + let score = delta_chr_f(&expected, &actual); + assert!(score < 20.0); + } +} diff --git a/crates/edit_prediction_cli/src/paths.rs b/crates/edit_prediction_cli/src/paths.rs new file mode 100644 index 0000000000000000000000000000000000000000..e5d420d0e3dbeda9c50b8e5a3683238149dbc604 --- /dev/null +++ b/crates/edit_prediction_cli/src/paths.rs @@ -0,0 +1,27 @@ +use std::{ + path::{Path, PathBuf}, + sync::LazyLock, +}; + +pub static DATA_DIR: LazyLock = LazyLock::new(|| { + let dir = dirs::home_dir().unwrap().join(".zed_ep"); + ensure_dir(&dir) +}); +pub static CACHE_DIR: LazyLock = LazyLock::new(|| ensure_dir(&DATA_DIR.join("cache"))); +pub static REPOS_DIR: LazyLock = LazyLock::new(|| ensure_dir(&DATA_DIR.join("repos"))); +pub static WORKTREES_DIR: LazyLock = + LazyLock::new(|| ensure_dir(&DATA_DIR.join("worktrees"))); +pub static RUN_DIR: LazyLock = LazyLock::new(|| { + DATA_DIR + .join("runs") + .join(chrono::Local::now().format("%d-%m-%y-%H_%M_%S").to_string()) +}); +pub static LATEST_EXAMPLE_RUN_DIR: LazyLock = LazyLock::new(|| DATA_DIR.join("latest")); +pub static LLM_CACHE_DB: LazyLock = LazyLock::new(|| CACHE_DIR.join("llm_cache.sqlite")); +pub static FAILED_EXAMPLES_DIR: LazyLock = + LazyLock::new(|| ensure_dir(&RUN_DIR.join("failed"))); + +fn ensure_dir(path: &Path) -> PathBuf { + std::fs::create_dir_all(path).expect("Failed to create directory"); + path.to_path_buf() +} diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs new file mode 100644 index 0000000000000000000000000000000000000000..aa93c5415dea091164a68b76a34242697aac70e3 --- /dev/null +++ b/crates/edit_prediction_cli/src/predict.rs @@ -0,0 +1,291 @@ +use crate::{ + PredictionProvider, PromptFormat, + anthropic_client::AnthropicClient, + example::{Example, ExamplePrediction}, + format_prompt::{TeacherPrompt, run_format_prompt}, + headless::EpAppState, + load_project::run_load_project, + paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR}, + progress::{InfoStyle, Progress, Step}, + retrieve_context::run_context_retrieval, +}; +use anyhow::Context as _; +use edit_prediction::{DebugEvent, EditPredictionStore}; +use futures::{FutureExt as _, StreamExt as _, future::Shared}; +use gpui::{AppContext as _, AsyncApp, Task}; +use std::{ + fs, + sync::{ + Arc, Mutex, OnceLock, + atomic::{AtomicUsize, Ordering::SeqCst}, + }, +}; + +pub async fn run_prediction( + example: &mut Example, + provider: Option, + repetition_count: usize, + app_state: Arc, + mut cx: AsyncApp, +) -> anyhow::Result<()> { + if !example.predictions.is_empty() { + return Ok(()); + } 
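+    // From here on an explicit provider is required: context retrieval runs
+    // first, the teacher providers are then served via the Anthropic prompt
+    // path, and all other providers go through the in-process
+    // EditPredictionStore further below.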
+ + let provider = provider.context("provider is required")?; + + run_context_retrieval(example, app_state.clone(), cx.clone()).await?; + + if matches!( + provider, + PredictionProvider::Teacher | PredictionProvider::TeacherNonBatching + ) { + let _step_progress = Progress::global().start(Step::Predict, &example.spec.name); + + if example.prompt.is_none() { + run_format_prompt(example, PromptFormat::Teacher, app_state.clone(), cx).await?; + } + + let batched = matches!(provider, PredictionProvider::Teacher); + return predict_anthropic(example, repetition_count, batched).await; + } + + run_load_project(example, app_state.clone(), cx.clone()).await?; + + let _step_progress = Progress::global().start(Step::Predict, &example.spec.name); + + if matches!( + provider, + PredictionProvider::Zeta1 | PredictionProvider::Zeta2 + ) { + static AUTHENTICATED: OnceLock>> = OnceLock::new(); + AUTHENTICATED + .get_or_init(|| { + let client = app_state.client.clone(); + cx.spawn(async move |cx| { + if let Err(e) = client.sign_in_with_optional_connect(true, cx).await { + eprintln!("Authentication failed: {}", e); + } + }) + .shared() + }) + .clone() + .await; + } + + let ep_store = cx.update(|cx| { + EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") + })??; + + ep_store.update(&mut cx, |store, _cx| { + let model = match provider { + PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta1, + PredictionProvider::Zeta2 => edit_prediction::EditPredictionModel::Zeta2, + PredictionProvider::Sweep => edit_prediction::EditPredictionModel::Sweep, + PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury, + PredictionProvider::Teacher | PredictionProvider::TeacherNonBatching => { + unreachable!() + } + }; + store.set_edit_prediction_model(model); + })?; + let state = example.state.as_ref().context("state must be set")?; + let run_dir = RUN_DIR.join(&example.spec.name); + + let updated_example = Arc::new(Mutex::new(example.clone())); + let current_run_ix = Arc::new(AtomicUsize::new(0)); + + let mut debug_rx = + ep_store.update(&mut cx, |store, cx| store.debug_info(&state.project, cx))?; + let debug_task = cx.background_spawn({ + let updated_example = updated_example.clone(); + let current_run_ix = current_run_ix.clone(); + let run_dir = run_dir.clone(); + async move { + while let Some(event) = debug_rx.next().await { + let run_ix = current_run_ix.load(SeqCst); + let mut updated_example = updated_example.lock().unwrap(); + + let run_dir = if repetition_count > 1 { + run_dir.join(format!("{:03}", run_ix)) + } else { + run_dir.clone() + }; + + match event { + DebugEvent::EditPredictionStarted(request) => { + assert_eq!(updated_example.predictions.len(), run_ix + 1); + + if let Some(prompt) = request.prompt { + fs::write(run_dir.join("prediction_prompt.md"), &prompt)?; + } + } + DebugEvent::EditPredictionFinished(request) => { + assert_eq!(updated_example.predictions.len(), run_ix + 1); + + if let Some(output) = request.model_output { + fs::write(run_dir.join("prediction_response.md"), &output)?; + updated_example + .predictions + .last_mut() + .unwrap() + .actual_output = output; + } + if run_ix >= repetition_count { + break; + } + } + _ => {} + } + } + anyhow::Ok(()) + } + }); + + for ix in 0..repetition_count { + current_run_ix.store(ix, SeqCst); + let run_dir = if repetition_count > 1 { + run_dir.join(format!("{:03}", ix)) + } else { + run_dir.clone() + }; + + fs::create_dir_all(&run_dir)?; + if LATEST_EXAMPLE_RUN_DIR.is_symlink() { + 
fs::remove_file(&*LATEST_EXAMPLE_RUN_DIR)?; + } + #[cfg(unix)] + std::os::unix::fs::symlink(&run_dir, &*LATEST_EXAMPLE_RUN_DIR)?; + #[cfg(windows)] + std::os::windows::fs::symlink_dir(&run_dir, &*LATEST_EXAMPLE_RUN_DIR)?; + + updated_example + .lock() + .unwrap() + .predictions + .push(ExamplePrediction { + actual_patch: String::new(), + actual_output: String::new(), + provider, + }); + + let prediction = ep_store + .update(&mut cx, |store, cx| { + store.request_prediction( + &state.project, + &state.buffer, + state.cursor_position, + cloud_llm_client::PredictEditsRequestTrigger::Cli, + cx, + ) + })? + .await?; + + let actual_patch = prediction + .and_then(|prediction| { + let prediction = prediction.prediction.ok()?; + prediction.edit_preview.as_unified_diff(&prediction.edits) + }) + .unwrap_or_default(); + + let has_prediction = !actual_patch.is_empty(); + + updated_example + .lock() + .unwrap() + .predictions + .last_mut() + .unwrap() + .actual_patch = actual_patch; + + if ix == repetition_count - 1 { + let (info, style) = if has_prediction { + ("predicted", InfoStyle::Normal) + } else { + ("no prediction", InfoStyle::Warning) + }; + _step_progress.set_info(info, style); + } + } + + ep_store.update(&mut cx, |store, _| { + store.remove_project(&state.project); + })?; + debug_task.await?; + + *example = Arc::into_inner(updated_example) + .ok_or_else(|| anyhow::anyhow!("Failed to unwrap Arc"))? + .into_inner() + .map_err(|_| anyhow::anyhow!("Failed to unwrap Mutex"))?; + Ok(()) +} + +async fn predict_anthropic( + example: &mut Example, + _repetition_count: usize, + batched: bool, +) -> anyhow::Result<()> { + let llm_model_name = "claude-sonnet-4-5"; + let max_tokens = 16384; + let llm_client = if batched { + AnthropicClient::batch(&crate::paths::LLM_CACHE_DB.as_ref()) + } else { + AnthropicClient::plain() + }; + let llm_client = llm_client.context("Failed to create LLM client")?; + + let prompt = example.prompt.as_ref().context("Prompt is required")?; + + let messages = vec![anthropic::Message { + role: anthropic::Role::User, + content: vec![anthropic::RequestContent::Text { + text: prompt.input.clone(), + cache_control: None, + }], + }]; + + let Some(response) = llm_client + .generate(llm_model_name, max_tokens, messages) + .await? 
+ else { + // Request stashed for batched processing + return Ok(()); + }; + + let actual_output = response + .content + .into_iter() + .filter_map(|content| match content { + anthropic::ResponseContent::Text { text } => Some(text), + _ => None, + }) + .collect::>() + .join("\n"); + + let actual_patch = TeacherPrompt::parse(example, &actual_output)?; + + let prediction = ExamplePrediction { + actual_patch, + actual_output, + provider: PredictionProvider::Teacher, + }; + + example.predictions.push(prediction); + Ok(()) +} + +pub async fn sync_batches(provider: &PredictionProvider) -> anyhow::Result<()> { + match provider { + PredictionProvider::Teacher => { + let cache_path = crate::paths::LLM_CACHE_DB.as_ref(); + let llm_client = + AnthropicClient::batch(cache_path).context("Failed to create LLM client")?; + llm_client + .sync_batches() + .await + .context("Failed to sync batches")?; + } + _ => (), + }; + Ok(()) +} diff --git a/crates/edit_prediction_cli/src/progress.rs b/crates/edit_prediction_cli/src/progress.rs new file mode 100644 index 0000000000000000000000000000000000000000..ddc710f202cc98e5932c234cb6bebcc93b28171c --- /dev/null +++ b/crates/edit_prediction_cli/src/progress.rs @@ -0,0 +1,508 @@ +use std::{ + borrow::Cow, + collections::HashMap, + io::{IsTerminal, Write}, + sync::{Arc, Mutex, OnceLock}, + time::{Duration, Instant}, +}; + +use log::{Level, Log, Metadata, Record}; + +pub struct Progress { + inner: Mutex, +} + +struct ProgressInner { + completed: Vec, + in_progress: HashMap, + is_tty: bool, + terminal_width: usize, + max_example_name_len: usize, + status_lines_displayed: usize, + total_examples: usize, + failed_examples: usize, + last_line_is_logging: bool, +} + +#[derive(Clone)] +struct InProgressTask { + step: Step, + started_at: Instant, + substatus: Option, + info: Option<(String, InfoStyle)>, +} + +struct CompletedTask { + step: Step, + example_name: String, + duration: Duration, + info: Option<(String, InfoStyle)>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum Step { + LoadProject, + Context, + FormatPrompt, + Predict, + Score, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum InfoStyle { + Normal, + Warning, +} + +impl Step { + pub fn label(&self) -> &'static str { + match self { + Step::LoadProject => "Load", + Step::Context => "Context", + Step::FormatPrompt => "Format", + Step::Predict => "Predict", + Step::Score => "Score", + } + } + + fn color_code(&self) -> &'static str { + match self { + Step::LoadProject => "\x1b[33m", + Step::Context => "\x1b[35m", + Step::FormatPrompt => "\x1b[34m", + Step::Predict => "\x1b[32m", + Step::Score => "\x1b[31m", + } + } +} + +static GLOBAL: OnceLock> = OnceLock::new(); +static LOGGER: ProgressLogger = ProgressLogger; + +const MARGIN: usize = 4; +const MAX_STATUS_LINES: usize = 10; + +impl Progress { + /// Returns the global Progress instance, initializing it if necessary. 
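+    ///
+    /// Illustrative usage (step and example name are placeholders):
+    /// ```ignore
+    /// let step = Progress::global().start(Step::Predict, "example-name");
+    /// step.set_substatus("running");
+    /// // The step is reported as completed when `step` is dropped.
+    /// ```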
+ pub fn global() -> Arc { + GLOBAL + .get_or_init(|| { + let progress = Arc::new(Self { + inner: Mutex::new(ProgressInner { + completed: Vec::new(), + in_progress: HashMap::new(), + is_tty: std::io::stderr().is_terminal(), + terminal_width: get_terminal_width(), + max_example_name_len: 0, + status_lines_displayed: 0, + total_examples: 0, + failed_examples: 0, + last_line_is_logging: false, + }), + }); + let _ = log::set_logger(&LOGGER); + log::set_max_level(log::LevelFilter::Error); + progress + }) + .clone() + } + + pub fn set_total_examples(&self, total: usize) { + let mut inner = self.inner.lock().unwrap(); + inner.total_examples = total; + } + + pub fn increment_failed(&self) { + let mut inner = self.inner.lock().unwrap(); + inner.failed_examples += 1; + } + + /// Prints a message to stderr, clearing and redrawing status lines to avoid corruption. + /// This should be used for any output that needs to appear above the status lines. + fn log(&self, message: &str) { + let mut inner = self.inner.lock().unwrap(); + Self::clear_status_lines(&mut inner); + + if !inner.last_line_is_logging { + let reset = "\x1b[0m"; + let dim = "\x1b[2m"; + let divider = "─".repeat(inner.terminal_width.saturating_sub(MARGIN)); + eprintln!("{dim}{divider}{reset}"); + inner.last_line_is_logging = true; + } + + eprintln!("{}", message); + } + + pub fn start(self: &Arc, step: Step, example_name: &str) -> StepProgress { + let mut inner = self.inner.lock().unwrap(); + + Self::clear_status_lines(&mut inner); + + inner.max_example_name_len = inner.max_example_name_len.max(example_name.len()); + inner.in_progress.insert( + example_name.to_string(), + InProgressTask { + step, + started_at: Instant::now(), + substatus: None, + info: None, + }, + ); + + Self::print_status_lines(&mut inner); + + StepProgress { + progress: self.clone(), + step, + example_name: example_name.to_string(), + } + } + + fn finish(&self, step: Step, example_name: &str) { + let mut inner = self.inner.lock().unwrap(); + + let Some(task) = inner.in_progress.remove(example_name) else { + return; + }; + + if task.step == step { + inner.completed.push(CompletedTask { + step: task.step, + example_name: example_name.to_string(), + duration: task.started_at.elapsed(), + info: task.info, + }); + + Self::clear_status_lines(&mut inner); + Self::print_logging_closing_divider(&mut inner); + Self::print_completed(&inner, inner.completed.last().unwrap()); + Self::print_status_lines(&mut inner); + } else { + inner.in_progress.insert(example_name.to_string(), task); + } + } + + fn print_logging_closing_divider(inner: &mut ProgressInner) { + if inner.last_line_is_logging { + let reset = "\x1b[0m"; + let dim = "\x1b[2m"; + let divider = "─".repeat(inner.terminal_width.saturating_sub(MARGIN)); + eprintln!("{dim}{divider}{reset}"); + inner.last_line_is_logging = false; + } + } + + fn clear_status_lines(inner: &mut ProgressInner) { + if inner.is_tty && inner.status_lines_displayed > 0 { + // Move up and clear each line we previously displayed + for _ in 0..inner.status_lines_displayed { + eprint!("\x1b[A\x1b[K"); + } + let _ = std::io::stderr().flush(); + inner.status_lines_displayed = 0; + } + } + + fn print_completed(inner: &ProgressInner, task: &CompletedTask) { + let duration = format_duration(task.duration); + let name_width = inner.max_example_name_len; + + if inner.is_tty { + let reset = "\x1b[0m"; + let bold = "\x1b[1m"; + let dim = "\x1b[2m"; + + let yellow = "\x1b[33m"; + let info_part = task + .info + .as_ref() + .map(|(s, style)| { + if *style == 
InfoStyle::Warning { + format!("{yellow}{s}{reset}") + } else { + s.to_string() + } + }) + .unwrap_or_default(); + + let prefix = format!( + "{bold}{color}{label:>12}{reset} {name:12} {name: 0 { + format!(" {} failed ", failed_count) + } else { + String::new() + }; + + let range_label = format!( + " {}/{}/{} ", + done_count, in_progress_count, inner.total_examples + ); + + // Print a divider line with failed count on left, range label on right + let failed_visible_len = strip_ansi_len(&failed_label); + let range_visible_len = range_label.len(); + let middle_divider_len = inner + .terminal_width + .saturating_sub(MARGIN * 2) + .saturating_sub(failed_visible_len) + .saturating_sub(range_visible_len); + let left_divider = "─".repeat(MARGIN); + let middle_divider = "─".repeat(middle_divider_len); + let right_divider = "─".repeat(MARGIN); + eprintln!( + "{dim}{left_divider}{reset}{failed_label}{dim}{middle_divider}{reset}{range_label}{dim}{right_divider}{reset}" + ); + + let mut tasks: Vec<_> = inner.in_progress.iter().collect(); + tasks.sort_by_key(|(name, _)| *name); + + let total_tasks = tasks.len(); + let mut lines_printed = 0; + + for (name, task) in tasks.iter().take(MAX_STATUS_LINES) { + let elapsed = format_duration(task.started_at.elapsed()); + let substatus_part = task + .substatus + .as_ref() + .map(|s| truncate_with_ellipsis(s, 30)) + .unwrap_or_default(); + + let step_label = task.step.label(); + let step_color = task.step.color_code(); + let name_width = inner.max_example_name_len; + + let prefix = format!( + "{bold}{step_color}{step_label:>12}{reset} {name: MAX_STATUS_LINES { + let remaining = total_tasks - MAX_STATUS_LINES; + eprintln!("{:>12} +{remaining} more", ""); + lines_printed += 1; + } + + inner.status_lines_displayed = lines_printed + 1; // +1 for the divider line + let _ = std::io::stderr().flush(); + } + + pub fn finalize(&self) { + let mut inner = self.inner.lock().unwrap(); + Self::clear_status_lines(&mut inner); + + // Print summary if there were failures + if inner.failed_examples > 0 { + let total_processed = inner.completed.len() + inner.failed_examples; + let percentage = if total_processed > 0 { + inner.failed_examples as f64 / total_processed as f64 * 100.0 + } else { + 0.0 + }; + eprintln!( + "\n{} of {} examples failed ({:.1}%)", + inner.failed_examples, total_processed, percentage + ); + } + } +} + +pub struct StepProgress { + progress: Arc, + step: Step, + example_name: String, +} + +impl StepProgress { + pub fn set_substatus(&self, substatus: impl Into>) { + let mut inner = self.progress.inner.lock().unwrap(); + if let Some(task) = inner.in_progress.get_mut(&self.example_name) { + task.substatus = Some(substatus.into().into_owned()); + Progress::clear_status_lines(&mut inner); + Progress::print_status_lines(&mut inner); + } + } + + pub fn clear_substatus(&self) { + let mut inner = self.progress.inner.lock().unwrap(); + if let Some(task) = inner.in_progress.get_mut(&self.example_name) { + task.substatus = None; + Progress::clear_status_lines(&mut inner); + Progress::print_status_lines(&mut inner); + } + } + + pub fn set_info(&self, info: impl Into, style: InfoStyle) { + let mut inner = self.progress.inner.lock().unwrap(); + if let Some(task) = inner.in_progress.get_mut(&self.example_name) { + task.info = Some((info.into(), style)); + } + } +} + +impl Drop for StepProgress { + fn drop(&mut self) { + self.progress.finish(self.step, &self.example_name); + } +} + +struct ProgressLogger; + +impl Log for ProgressLogger { + fn enabled(&self, metadata: &Metadata) 
-> bool { + metadata.level() <= Level::Info + } + + fn log(&self, record: &Record) { + if !self.enabled(record.metadata()) { + return; + } + + let level_color = match record.level() { + Level::Error => "\x1b[31m", + Level::Warn => "\x1b[33m", + Level::Info => "\x1b[32m", + Level::Debug => "\x1b[34m", + Level::Trace => "\x1b[35m", + }; + let reset = "\x1b[0m"; + let bold = "\x1b[1m"; + + let level_label = match record.level() { + Level::Error => "Error", + Level::Warn => "Warn", + Level::Info => "Info", + Level::Debug => "Debug", + Level::Trace => "Trace", + }; + + let message = format!( + "{bold}{level_color}{level_label:>12}{reset} {}", + record.args() + ); + + if let Some(progress) = GLOBAL.get() { + progress.log(&message); + } else { + eprintln!("{}", message); + } + } + + fn flush(&self) { + let _ = std::io::stderr().flush(); + } +} + +#[cfg(unix)] +fn get_terminal_width() -> usize { + unsafe { + let mut winsize: libc::winsize = std::mem::zeroed(); + if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) == 0 + && winsize.ws_col > 0 + { + winsize.ws_col as usize + } else { + 80 + } + } +} + +#[cfg(not(unix))] +fn get_terminal_width() -> usize { + 80 +} + +fn strip_ansi_len(s: &str) -> usize { + let mut len = 0; + let mut in_escape = false; + for c in s.chars() { + if c == '\x1b' { + in_escape = true; + } else if in_escape { + if c == 'm' { + in_escape = false; + } + } else { + len += 1; + } + } + len +} + +fn truncate_with_ellipsis(s: &str, max_len: usize) -> String { + if s.len() <= max_len { + s.to_string() + } else { + format!("{}…", &s[..max_len.saturating_sub(1)]) + } +} + +fn format_duration(duration: Duration) -> String { + const MINUTE_IN_MILLIS: f32 = 60. * 1000.; + + let millis = duration.as_millis() as f32; + if millis < 1000.0 { + format!("{}ms", millis) + } else if millis < MINUTE_IN_MILLIS { + format!("{:.1}s", millis / 1_000.0) + } else { + format!("{:.1}m", millis / MINUTE_IN_MILLIS) + } +} diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs new file mode 100644 index 0000000000000000000000000000000000000000..abba4504edc6c0733ffd8c0677e2e3304d8100fa --- /dev/null +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -0,0 +1,192 @@ +use crate::{ + example::{Example, ExampleContext}, + headless::EpAppState, + load_project::run_load_project, + progress::{InfoStyle, Progress, Step, StepProgress}, +}; +use anyhow::Context as _; +use collections::HashSet; +use edit_prediction::{DebugEvent, EditPredictionStore}; +use futures::{FutureExt as _, StreamExt as _, channel::mpsc}; +use gpui::{AsyncApp, Entity}; +use language::Buffer; +use project::Project; +use std::sync::Arc; +use std::time::Duration; + +pub async fn run_context_retrieval( + example: &mut Example, + app_state: Arc, + mut cx: AsyncApp, +) -> anyhow::Result<()> { + if example.context.is_some() { + return Ok(()); + } + + run_load_project(example, app_state.clone(), cx.clone()).await?; + + let step_progress: Arc = Progress::global() + .start(Step::Context, &example.spec.name) + .into(); + + let state = example.state.as_ref().unwrap(); + let project = state.project.clone(); + + let _lsp_handle = project.update(&mut cx, |project, cx| { + project.register_buffer_with_language_servers(&state.buffer, cx) + })?; + wait_for_language_servers_to_start(&project, &state.buffer, &step_progress, &mut cx).await?; + + let ep_store = cx.update(|cx| { + EditPredictionStore::try_global(cx).context("EditPredictionStore not initialized") + })??; + + let mut 
events = ep_store.update(&mut cx, |store, cx| { + store.register_buffer(&state.buffer, &project, cx); + store.set_use_context(true); + store.refresh_context(&project, &state.buffer, state.cursor_position, cx); + store.debug_info(&project, cx) + })?; + + while let Some(event) = events.next().await { + match event { + DebugEvent::ContextRetrievalFinished(_) => { + break; + } + _ => {} + } + } + + let context_files = + ep_store.update(&mut cx, |store, cx| store.context_for_project(&project, cx))?; + + let excerpt_count: usize = context_files.iter().map(|f| f.excerpts.len()).sum(); + step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal); + + example.context = Some(ExampleContext { + files: context_files, + }); + Ok(()) +} + +async fn wait_for_language_servers_to_start( + project: &Entity, + buffer: &Entity, + step_progress: &Arc, + cx: &mut AsyncApp, +) -> anyhow::Result<()> { + let lsp_store = project.read_with(cx, |project, _| project.lsp_store())?; + + let (language_server_ids, mut starting_language_server_ids) = buffer + .update(cx, |buffer, cx| { + lsp_store.update(cx, |lsp_store, cx| { + let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); + let starting_ids = ids + .iter() + .copied() + .filter(|id| !lsp_store.language_server_statuses.contains_key(&id)) + .collect::>(); + (ids, starting_ids) + }) + }) + .unwrap_or_default(); + + step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len())); + + let timeout = cx + .background_executor() + .timer(Duration::from_secs(60 * 5)) + .shared(); + + let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); + let added_subscription = cx.subscribe(project, { + let step_progress = step_progress.clone(); + move |_, event, _| match event { + project::Event::LanguageServerAdded(language_server_id, name, _) => { + step_progress.set_substatus(format!("LSP started: {}", name)); + tx.try_send(*language_server_id).ok(); + } + _ => {} + } + }); + + while !starting_language_server_ids.is_empty() { + futures::select! { + language_server_id = rx.next() => { + if let Some(id) = language_server_id { + starting_language_server_ids.remove(&id); + } + }, + _ = timeout.clone().fuse() => { + return Err(anyhow::anyhow!("LSP wait timed out after 5 minutes")); + } + } + } + + drop(added_subscription); + + if !language_server_ids.is_empty() { + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? + .detach(); + } + + let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); + let subscriptions = [ + cx.subscribe(&lsp_store, { + let step_progress = step_progress.clone(); + move |_, event, _| { + if let project::LspStoreEvent::LanguageServerUpdate { + message: + client::proto::update_language_server::Variant::WorkProgress( + client::proto::LspWorkProgress { + message: Some(message), + .. + }, + ), + .. + } = event + { + step_progress.set_substatus(message.clone()); + } + } + }), + cx.subscribe(project, { + let step_progress = step_progress.clone(); + move |_, event, cx| match event { + project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { + let lsp_store = lsp_store.read(cx); + let name = lsp_store + .language_server_adapter_for_id(*language_server_id) + .unwrap() + .name(); + step_progress.set_substatus(format!("LSP idle: {}", name)); + tx.try_send(*language_server_id).ok(); + } + _ => {} + } + }), + ]; + + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? 
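+        // Wait for the save to finish before waiting for the language servers
+        // to report DiskBasedDiagnosticsFinished below.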
+ .await?; + + let mut pending_language_server_ids = HashSet::from_iter(language_server_ids.into_iter()); + while !pending_language_server_ids.is_empty() { + futures::select! { + language_server_id = rx.next() => { + if let Some(id) = language_server_id { + pending_language_server_ids.remove(&id); + } + }, + _ = timeout.clone().fuse() => { + return Err(anyhow::anyhow!("LSP wait timed out after 5 minutes")); + } + } + } + + drop(subscriptions); + step_progress.clear_substatus(); + Ok(()) +} diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs new file mode 100644 index 0000000000000000000000000000000000000000..7b507e6d19c943de92eb0b22c7d24d4026789fed --- /dev/null +++ b/crates/edit_prediction_cli/src/score.rs @@ -0,0 +1,123 @@ +use crate::{ + PredictArgs, + example::{Example, ExampleScore}, + headless::EpAppState, + metrics::{self, ClassificationMetrics}, + predict::run_prediction, + progress::{Progress, Step}, +}; +use edit_prediction::udiff::DiffLine; +use gpui::AsyncApp; +use std::sync::Arc; + +pub async fn run_scoring( + example: &mut Example, + args: &PredictArgs, + app_state: Arc, + cx: AsyncApp, +) -> anyhow::Result<()> { + run_prediction( + example, + Some(args.provider), + args.repetitions, + app_state, + cx, + ) + .await?; + + let _progress = Progress::global().start(Step::Score, &example.spec.name); + + let expected_patch = parse_patch(&example.spec.expected_patch); + + let mut scores = vec![]; + + for pred in &example.predictions { + let actual_patch = parse_patch(&pred.actual_patch); + let line_match = metrics::line_match_score(&expected_patch, &actual_patch); + let delta_chr_f = metrics::delta_chr_f(&expected_patch, &actual_patch) as f32; + + scores.push(ExampleScore { + delta_chr_f, + line_match, + }); + } + + example.score = scores; + Ok(()) +} + +fn parse_patch(patch: &str) -> Vec> { + patch.lines().map(DiffLine::parse).collect() +} + +pub fn print_report(examples: &[Example]) { + eprintln!( + "──────────────────────────────────────────────────────────────────────────────────────" + ); + eprintln!( + "{:<30} {:>4} {:>4} {:>4} {:>10} {:>8} {:>8} {:>10}", + "Example name", "TP", "FP", "FN", "Precision", "Recall", "F1", "DeltaChrF" + ); + eprintln!( + "──────────────────────────────────────────────────────────────────────────────────────" + ); + + let mut all_line_match_scores = Vec::new(); + let mut all_delta_chr_f_scores = Vec::new(); + + for example in examples { + for score in example.score.iter() { + let line_match = &score.line_match; + + eprintln!( + "{:<30} {:>4} {:>4} {:>4} {:>9.2}% {:>7.2}% {:>7.2}% {:>9.2}", + truncate_name(&example.spec.name, 30), + line_match.true_positives, + line_match.false_positives, + line_match.false_negatives, + line_match.precision() * 100.0, + line_match.recall() * 100.0, + line_match.f1_score() * 100.0, + score.delta_chr_f + ); + + all_line_match_scores.push(line_match.clone()); + all_delta_chr_f_scores.push(score.delta_chr_f); + } + } + + eprintln!( + "──────────────────────────────────────────────────────────────────────────────────────" + ); + + if !all_line_match_scores.is_empty() { + let total_line_match = ClassificationMetrics::aggregate(all_line_match_scores.iter()); + let avg_delta_chr_f: f32 = + all_delta_chr_f_scores.iter().sum::() / all_delta_chr_f_scores.len() as f32; + + eprintln!( + "{:<30} {:>4} {:>4} {:>4} {:>9.2}% {:>7.2}% {:>7.2}% {:>9.2}", + "TOTAL", + total_line_match.true_positives, + total_line_match.false_positives, + total_line_match.false_negatives, + 
total_line_match.precision() * 100.0, + total_line_match.recall() * 100.0, + total_line_match.f1_score() * 100.0, + avg_delta_chr_f + ); + eprintln!( + "──────────────────────────────────────────────────────────────────────────────────────" + ); + } + + eprintln!("\n"); +} + +fn truncate_name(name: &str, max_len: usize) -> String { + if name.len() <= max_len { + name.to_string() + } else { + format!("{}...", &name[..max_len - 3]) + } +} diff --git a/crates/edit_prediction_cli/src/teacher.prompt.md b/crates/edit_prediction_cli/src/teacher.prompt.md new file mode 100644 index 0000000000000000000000000000000000000000..d629152da6739ec1d603857f6a9ee556c8986fe8 --- /dev/null +++ b/crates/edit_prediction_cli/src/teacher.prompt.md @@ -0,0 +1,53 @@ +# Instructions + +You are a code completion assistant helping a programmer finish their work. Your task is to: + +1. Analyze the edit history to understand what the programmer is trying to achieve +2. Identify any incomplete refactoring or changes that need to be finished +3. Make the remaining edits that a human programmer would logically make next (by rewriting the corresponding code sections) +4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere. + +Focus on: +- Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs) +- Completing any partially-applied changes across the codebase +- Ensuring consistency with the programming style and patterns already established +- Making edits that maintain or improve code quality +- If the programmer started refactoring one instance of a pattern, find and update ALL similar instances +- Don't write a lot of code if you're not sure what to do + +Rules: +- Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals. +- Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code. +- Keep existing formatting unless it's absolutely necessary + +Input format: +- You receive small code fragments called context (structs, field definitions, function signatures, etc.). They may or may not be relevant. +- Never modify the context code. +- You also receive a code snippet between <|editable_region_start|> and <|editable_region_end|>. This is the editable region. +- The cursor position is marked with <|user_cursor|>. + +Output format: +- Return the entire editable region, applying any edits you make. +- Remove the <|user_cursor|> marker. +- Wrap the edited code in a block of exactly five backticks. 
+ +Output example: +````` + // `zed --askpass` Makes zed operate in nc/netcat mode for use with askpass + if let Some(socket) = &args.askpass {{ + askpass::main(socket); + return Ok(()); + }} +````` + +## User Edits History + +{{edit_history}} + +## Code Context + +{{context}} + +## Editable region + +{{editable_region}} diff --git a/crates/edit_prediction_context/Cargo.toml b/crates/edit_prediction_context/Cargo.toml index 6976831b8cbbe2b998f713ff65f1585f28fc3005..731ffc85d159e285ad497c29fba2f74179d4149b 100644 --- a/crates/edit_prediction_context/Cargo.toml +++ b/crates/edit_prediction_context/Cargo.toml @@ -12,41 +12,33 @@ workspace = true path = "src/edit_prediction_context.rs" [dependencies] +parking_lot.workspace = true anyhow.workspace = true -arrayvec.workspace = true cloud_llm_client.workspace = true collections.workspace = true futures.workspace = true gpui.workspace = true -hashbrown.workspace = true -indoc.workspace = true -itertools.workspace = true language.workspace = true -log.workspace = true -ordered-float.workspace = true -postage.workspace = true +lsp.workspace = true project.workspace = true -regex.workspace = true +log.workspace = true serde.workspace = true -slotmap.workspace = true -strum.workspace = true -text.workspace = true +smallvec.workspace = true tree-sitter.workspace = true util.workspace = true +zeta_prompt.workspace = true [dev-dependencies] -clap.workspace = true +env_logger.workspace = true +indoc.workspace = true futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -indoc.workspace = true language = { workspace = true, features = ["test-support"] } +lsp = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true project = {workspace= true, features = ["test-support"]} serde_json.workspace = true settings = {workspace= true, features = ["test-support"]} text = { workspace = true, features = ["test-support"] } -tree-sitter-c.workspace = true -tree-sitter-cpp.workspace = true -tree-sitter-go.workspace = true util = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/edit_prediction_context/src/assemble_excerpts.rs b/crates/edit_prediction_context/src/assemble_excerpts.rs new file mode 100644 index 0000000000000000000000000000000000000000..e337211cf90f0e4fbcb481f836e512b1ceb6477f --- /dev/null +++ b/crates/edit_prediction_context/src/assemble_excerpts.rs @@ -0,0 +1,156 @@ +use language::{BufferSnapshot, OffsetRangeExt as _, Point}; +use std::ops::Range; +use zeta_prompt::RelatedExcerpt; + +#[cfg(not(test))] +const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 512; +#[cfg(test)] +const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 24; + +pub fn assemble_excerpts( + buffer: &BufferSnapshot, + mut input_ranges: Vec>, +) -> Vec { + merge_ranges(&mut input_ranges); + + let mut outline_ranges = Vec::new(); + let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None); + let mut outline_ix = 0; + for input_range in &mut input_ranges { + *input_range = clip_range_to_lines(input_range, false, buffer); + + while let Some(outline_item) = outline_items.get(outline_ix) { + let item_range = clip_range_to_lines(&outline_item.range, false, buffer); + + if item_range.start > input_range.start { + break; + } + + if item_range.end > input_range.start { + let body_range = outline_item + .body_range(buffer) + .map(|body| clip_range_to_lines(&body, true, buffer)) + .filter(|body_range| { + body_range.to_offset(buffer).len() > MAX_OUTLINE_ITEM_BODY_SIZE + }); + + 
add_outline_item( + item_range.clone(), + body_range.clone(), + buffer, + &mut outline_ranges, + ); + + if let Some(body_range) = body_range + && input_range.start < body_range.start + { + let mut child_outline_ix = outline_ix + 1; + while let Some(next_outline_item) = outline_items.get(child_outline_ix) { + if next_outline_item.range.end > body_range.end { + break; + } + if next_outline_item.depth == outline_item.depth + 1 { + let next_item_range = + clip_range_to_lines(&next_outline_item.range, false, buffer); + + add_outline_item( + next_item_range, + next_outline_item + .body_range(buffer) + .map(|body| clip_range_to_lines(&body, true, buffer)), + buffer, + &mut outline_ranges, + ); + } + child_outline_ix += 1; + } + } + } + + outline_ix += 1; + } + } + + input_ranges.extend_from_slice(&outline_ranges); + merge_ranges(&mut input_ranges); + + input_ranges + .into_iter() + .map(|range| RelatedExcerpt { + row_range: range.start.row..range.end.row, + text: buffer.text_for_range(range).collect(), + }) + .collect() +} + +fn clip_range_to_lines( + range: &Range, + inward: bool, + buffer: &BufferSnapshot, +) -> Range { + let mut range = range.clone(); + if inward { + if range.start.column > 0 { + range.start.column = buffer.line_len(range.start.row); + } + range.end.column = 0; + } else { + range.start.column = 0; + if range.end.column > 0 { + range.end.column = buffer.line_len(range.end.row); + } + } + range +} + +fn add_outline_item( + mut item_range: Range, + body_range: Option>, + buffer: &BufferSnapshot, + outline_ranges: &mut Vec>, +) { + if let Some(mut body_range) = body_range { + if body_range.start.column > 0 { + body_range.start.column = buffer.line_len(body_range.start.row); + } + body_range.end.column = 0; + + let head_range = item_range.start..body_range.start; + if head_range.start < head_range.end { + outline_ranges.push(head_range); + } + + let tail_range = body_range.end..item_range.end; + if tail_range.start < tail_range.end { + outline_ranges.push(tail_range); + } + } else { + item_range.start.column = 0; + item_range.end.column = buffer.line_len(item_range.end.row); + outline_ranges.push(item_range); + } +} + +pub fn merge_ranges(ranges: &mut Vec>) { + ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end))); + + let mut index = 1; + while index < ranges.len() { + let mut prev_range_end = ranges[index - 1].end; + if prev_range_end.column > 0 { + prev_range_end += Point::new(1, 0); + } + + if (prev_range_end + Point::new(1, 0)) + .cmp(&ranges[index].start) + .is_ge() + { + let removed = ranges.remove(index); + if removed.end.cmp(&ranges[index - 1].end).is_gt() { + ranges[index - 1].end = removed.end; + } + } else { + index += 1; + } + } +} diff --git a/crates/edit_prediction_context/src/declaration.rs b/crates/edit_prediction_context/src/declaration.rs deleted file mode 100644 index cc32640425ecc563b1f24a6c695be1c13199cd73..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/declaration.rs +++ /dev/null @@ -1,350 +0,0 @@ -use cloud_llm_client::predict_edits_v3::{self, Line}; -use language::{Language, LanguageId}; -use project::ProjectEntryId; -use std::ops::Range; -use std::sync::Arc; -use std::{borrow::Cow, path::Path}; -use text::{Bias, BufferId, Rope}; -use util::paths::{path_ends_with, strip_path_suffix}; -use util::rel_path::RelPath; - -use crate::outline::OutlineDeclaration; - -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct Identifier { - pub name: Arc, - pub language_id: LanguageId, -} - 
-slotmap::new_key_type! { - pub struct DeclarationId; -} - -#[derive(Debug, Clone)] -pub enum Declaration { - File { - project_entry_id: ProjectEntryId, - declaration: FileDeclaration, - cached_path: CachedDeclarationPath, - }, - Buffer { - project_entry_id: ProjectEntryId, - buffer_id: BufferId, - rope: Rope, - declaration: BufferDeclaration, - cached_path: CachedDeclarationPath, - }, -} - -const ITEM_TEXT_TRUNCATION_LENGTH: usize = 1024; - -impl Declaration { - pub fn identifier(&self) -> &Identifier { - match self { - Declaration::File { declaration, .. } => &declaration.identifier, - Declaration::Buffer { declaration, .. } => &declaration.identifier, - } - } - - pub fn parent(&self) -> Option { - match self { - Declaration::File { declaration, .. } => declaration.parent, - Declaration::Buffer { declaration, .. } => declaration.parent, - } - } - - pub fn as_buffer(&self) -> Option<&BufferDeclaration> { - match self { - Declaration::File { .. } => None, - Declaration::Buffer { declaration, .. } => Some(declaration), - } - } - - pub fn as_file(&self) -> Option<&FileDeclaration> { - match self { - Declaration::Buffer { .. } => None, - Declaration::File { declaration, .. } => Some(declaration), - } - } - - pub fn project_entry_id(&self) -> ProjectEntryId { - match self { - Declaration::File { - project_entry_id, .. - } => *project_entry_id, - Declaration::Buffer { - project_entry_id, .. - } => *project_entry_id, - } - } - - pub fn cached_path(&self) -> &CachedDeclarationPath { - match self { - Declaration::File { cached_path, .. } => cached_path, - Declaration::Buffer { cached_path, .. } => cached_path, - } - } - - pub fn item_range(&self) -> Range { - match self { - Declaration::File { declaration, .. } => declaration.item_range.clone(), - Declaration::Buffer { declaration, .. } => declaration.item_range.clone(), - } - } - - pub fn item_line_range(&self) -> Range { - match self { - Declaration::File { declaration, .. } => declaration.item_line_range.clone(), - Declaration::Buffer { - declaration, rope, .. - } => { - Line(rope.offset_to_point(declaration.item_range.start).row) - ..Line(rope.offset_to_point(declaration.item_range.end).row) - } - } - } - - pub fn item_text(&self) -> (Cow<'_, str>, bool) { - match self { - Declaration::File { declaration, .. } => ( - declaration.text.as_ref().into(), - declaration.text_is_truncated, - ), - Declaration::Buffer { - rope, declaration, .. - } => ( - rope.chunks_in_range(declaration.item_range.clone()) - .collect::>(), - declaration.item_range_is_truncated, - ), - } - } - - pub fn signature_text(&self) -> (Cow<'_, str>, bool) { - match self { - Declaration::File { declaration, .. } => ( - declaration.text[self.signature_range_in_item_text()].into(), - declaration.signature_is_truncated, - ), - Declaration::Buffer { - rope, declaration, .. - } => ( - rope.chunks_in_range(declaration.signature_range.clone()) - .collect::>(), - declaration.signature_range_is_truncated, - ), - } - } - - pub fn signature_range(&self) -> Range { - match self { - Declaration::File { declaration, .. } => declaration.signature_range.clone(), - Declaration::Buffer { declaration, .. } => declaration.signature_range.clone(), - } - } - - pub fn signature_line_range(&self) -> Range { - match self { - Declaration::File { declaration, .. } => declaration.signature_line_range.clone(), - Declaration::Buffer { - declaration, rope, .. 
- } => { - Line(rope.offset_to_point(declaration.signature_range.start).row) - ..Line(rope.offset_to_point(declaration.signature_range.end).row) - } - } - } - - pub fn signature_range_in_item_text(&self) -> Range { - let signature_range = self.signature_range(); - let item_range = self.item_range(); - signature_range.start.saturating_sub(item_range.start) - ..(signature_range.end.saturating_sub(item_range.start)).min(item_range.len()) - } -} - -fn expand_range_to_line_boundaries_and_truncate( - range: &Range, - limit: usize, - rope: &Rope, -) -> (Range, Range, bool) { - let mut point_range = rope.offset_to_point(range.start)..rope.offset_to_point(range.end); - point_range.start.column = 0; - point_range.end.row += 1; - point_range.end.column = 0; - - let mut item_range = - rope.point_to_offset(point_range.start)..rope.point_to_offset(point_range.end); - let is_truncated = item_range.len() > limit; - if is_truncated { - item_range.end = item_range.start + limit; - } - item_range.end = rope.clip_offset(item_range.end, Bias::Left); - - let line_range = - predict_edits_v3::Line(point_range.start.row)..predict_edits_v3::Line(point_range.end.row); - (item_range, line_range, is_truncated) -} - -#[derive(Debug, Clone)] -pub struct FileDeclaration { - pub parent: Option, - pub identifier: Identifier, - /// offset range of the declaration in the file, expanded to line boundaries and truncated - pub item_range: Range, - /// line range of the declaration in the file, potentially truncated - pub item_line_range: Range, - /// text of `item_range` - pub text: Arc, - /// whether `text` was truncated - pub text_is_truncated: bool, - /// offset range of the signature in the file, expanded to line boundaries and truncated - pub signature_range: Range, - /// line range of the signature in the file, truncated - pub signature_line_range: Range, - /// whether `signature` was truncated - pub signature_is_truncated: bool, -} - -impl FileDeclaration { - pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> FileDeclaration { - let (item_range_in_file, item_line_range_in_file, text_is_truncated) = - expand_range_to_line_boundaries_and_truncate( - &declaration.item_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); - - let (mut signature_range_in_file, signature_line_range, mut signature_is_truncated) = - expand_range_to_line_boundaries_and_truncate( - &declaration.signature_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); - - if signature_range_in_file.start < item_range_in_file.start { - signature_range_in_file.start = item_range_in_file.start; - signature_is_truncated = true; - } - if signature_range_in_file.end > item_range_in_file.end { - signature_range_in_file.end = item_range_in_file.end; - signature_is_truncated = true; - } - - FileDeclaration { - parent: None, - identifier: declaration.identifier, - signature_range: signature_range_in_file, - signature_line_range, - signature_is_truncated, - text: rope - .chunks_in_range(item_range_in_file.clone()) - .collect::() - .into(), - text_is_truncated, - item_range: item_range_in_file, - item_line_range: item_line_range_in_file, - } - } -} - -#[derive(Debug, Clone)] -pub struct BufferDeclaration { - pub parent: Option, - pub identifier: Identifier, - pub item_range: Range, - pub item_range_is_truncated: bool, - pub signature_range: Range, - pub signature_range_is_truncated: bool, -} - -impl BufferDeclaration { - pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> Self { - let (item_range, _item_line_range, item_range_is_truncated) = 
- expand_range_to_line_boundaries_and_truncate( - &declaration.item_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); - let (signature_range, _signature_line_range, signature_range_is_truncated) = - expand_range_to_line_boundaries_and_truncate( - &declaration.signature_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); - Self { - parent: None, - identifier: declaration.identifier, - item_range, - item_range_is_truncated, - signature_range, - signature_range_is_truncated, - } - } -} - -#[derive(Debug, Clone)] -pub struct CachedDeclarationPath { - pub worktree_abs_path: Arc, - pub rel_path: Arc, - /// The relative path of the file, possibly stripped according to `import_path_strip_regex`. - pub rel_path_after_regex_stripping: Arc, -} - -impl CachedDeclarationPath { - pub fn new( - worktree_abs_path: Arc, - path: &Arc, - language: Option<&Arc>, - ) -> Self { - let rel_path = path.clone(); - let rel_path_after_regex_stripping = if let Some(language) = language - && let Some(strip_regex) = language.config().import_path_strip_regex.as_ref() - && let Ok(stripped) = RelPath::unix(&Path::new( - strip_regex.replace_all(rel_path.as_unix_str(), "").as_ref(), - )) { - Arc::from(stripped) - } else { - rel_path.clone() - }; - CachedDeclarationPath { - worktree_abs_path, - rel_path, - rel_path_after_regex_stripping, - } - } - - #[cfg(test)] - pub fn new_for_test(worktree_abs_path: &str, rel_path: &str) -> Self { - let rel_path: Arc = util::rel_path::rel_path(rel_path).into(); - CachedDeclarationPath { - worktree_abs_path: std::path::PathBuf::from(worktree_abs_path).into(), - rel_path_after_regex_stripping: rel_path.clone(), - rel_path, - } - } - - pub fn ends_with_posix_path(&self, path: &Path) -> bool { - if path.as_os_str().len() <= self.rel_path_after_regex_stripping.as_unix_str().len() { - path_ends_with(self.rel_path_after_regex_stripping.as_std_path(), path) - } else { - if let Some(remaining) = - strip_path_suffix(path, self.rel_path_after_regex_stripping.as_std_path()) - { - path_ends_with(&self.worktree_abs_path, remaining) - } else { - false - } - } - } - - pub fn equals_absolute_path(&self, path: &Path) -> bool { - if let Some(remaining) = - strip_path_suffix(path, &self.rel_path_after_regex_stripping.as_std_path()) - { - self.worktree_abs_path.as_ref() == remaining - } else { - false - } - } -} diff --git a/crates/edit_prediction_context/src/declaration_scoring.rs b/crates/edit_prediction_context/src/declaration_scoring.rs deleted file mode 100644 index 48a823362769770c836b44e7d8a6c1942d3a1196..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/declaration_scoring.rs +++ /dev/null @@ -1,539 +0,0 @@ -use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents; -use collections::HashMap; -use language::BufferSnapshot; -use ordered_float::OrderedFloat; -use project::ProjectEntryId; -use serde::Serialize; -use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc}; -use strum::EnumIter; -use text::{Point, ToPoint}; -use util::RangeExt as _; - -use crate::{ - CachedDeclarationPath, Declaration, EditPredictionExcerpt, Identifier, - imports::{Import, Imports, Module}, - reference::{Reference, ReferenceRegion}, - syntax_index::SyntaxIndexState, - text_similarity::{Occurrences, jaccard_similarity, weighted_overlap_coefficient}, -}; - -const MAX_IDENTIFIER_DECLARATION_COUNT: usize = 16; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct EditPredictionScoreOptions { - pub omit_excerpt_overlaps: bool, -} - -#[derive(Clone, Debug)] -pub struct ScoredDeclaration { - 
/// identifier used by the local reference - pub identifier: Identifier, - pub declaration: Declaration, - pub components: DeclarationScoreComponents, -} - -#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub enum DeclarationStyle { - Signature, - Declaration, -} - -#[derive(Clone, Debug, Serialize, Default)] -pub struct DeclarationScores { - pub signature: f32, - pub declaration: f32, - pub retrieval: f32, -} - -impl ScoredDeclaration { - /// Returns the score for this declaration with the specified style. - pub fn score(&self, style: DeclarationStyle) -> f32 { - // TODO: handle truncation - - // Score related to how likely this is the correct declaration, range 0 to 1 - let retrieval = self.retrieval_score(); - - // Score related to the distance between the reference and cursor, range 0 to 1 - let distance_score = if self.components.is_referenced_nearby { - 1.0 / (1.0 + self.components.reference_line_distance as f32 / 10.0).powf(2.0) - } else { - // same score as ~14 lines away, rationale is to not overly penalize references from parent signatures - 0.5 - }; - - // For now instead of linear combination, the scores are just multiplied together. - let combined_score = 10.0 * retrieval * distance_score; - - match style { - DeclarationStyle::Signature => { - combined_score * self.components.excerpt_vs_signature_weighted_overlap - } - DeclarationStyle::Declaration => { - 2.0 * combined_score * self.components.excerpt_vs_item_weighted_overlap - } - } - } - - pub fn retrieval_score(&self) -> f32 { - let mut score = if self.components.is_same_file { - 10.0 / self.components.same_file_declaration_count as f32 - } else if self.components.path_import_match_count > 0 { - 3.0 - } else if self.components.wildcard_path_import_match_count > 0 { - 1.0 - } else if self.components.normalized_import_similarity > 0.0 { - self.components.normalized_import_similarity - } else if self.components.normalized_wildcard_import_similarity > 0.0 { - 0.5 * self.components.normalized_wildcard_import_similarity - } else { - 1.0 / self.components.declaration_count as f32 - }; - score *= 1. + self.components.included_by_others as f32 / 2.; - score *= 1. + self.components.includes_others as f32 / 4.; - score - } - - pub fn size(&self, style: DeclarationStyle) -> usize { - match &self.declaration { - Declaration::File { declaration, .. } => match style { - DeclarationStyle::Signature => declaration.signature_range.len(), - DeclarationStyle::Declaration => declaration.text.len(), - }, - Declaration::Buffer { declaration, .. 
} => match style { - DeclarationStyle::Signature => declaration.signature_range.len(), - DeclarationStyle::Declaration => declaration.item_range.len(), - }, - } - } - - pub fn score_density(&self, style: DeclarationStyle) -> f32 { - self.score(style) / self.size(style) as f32 - } -} - -pub fn scored_declarations( - options: &EditPredictionScoreOptions, - index: &SyntaxIndexState, - excerpt: &EditPredictionExcerpt, - excerpt_occurrences: &Occurrences, - adjacent_occurrences: &Occurrences, - imports: &Imports, - identifier_to_references: HashMap>, - cursor_offset: usize, - current_buffer: &BufferSnapshot, -) -> Vec { - let cursor_point = cursor_offset.to_point(¤t_buffer); - - let mut wildcard_import_occurrences = Vec::new(); - let mut wildcard_import_paths = Vec::new(); - for wildcard_import in imports.wildcard_modules.iter() { - match wildcard_import { - Module::Namespace(namespace) => { - wildcard_import_occurrences.push(namespace.occurrences()) - } - Module::SourceExact(path) => wildcard_import_paths.push(path), - Module::SourceFuzzy(path) => { - wildcard_import_occurrences.push(Occurrences::from_path(&path)) - } - } - } - - let mut scored_declarations = Vec::new(); - let mut project_entry_id_to_outline_ranges: HashMap>> = - HashMap::default(); - for (identifier, references) in identifier_to_references { - let mut import_occurrences = Vec::new(); - let mut import_paths = Vec::new(); - let mut found_external_identifier: Option<&Identifier> = None; - - if let Some(imports) = imports.identifier_to_imports.get(&identifier) { - // only use alias when it's the only import, could be generalized if some language - // has overlapping aliases - // - // TODO: when an aliased declaration is included in the prompt, should include the - // aliasing in the prompt. - // - // TODO: For SourceFuzzy consider having componentwise comparison that pays - // attention to ordering. - if let [ - Import::Alias { - module, - external_identifier, - }, - ] = imports.as_slice() - { - match module { - Module::Namespace(namespace) => { - import_occurrences.push(namespace.occurrences()) - } - Module::SourceExact(path) => import_paths.push(path), - Module::SourceFuzzy(path) => { - import_occurrences.push(Occurrences::from_path(&path)) - } - } - found_external_identifier = Some(&external_identifier); - } else { - for import in imports { - match import { - Import::Direct { module } => match module { - Module::Namespace(namespace) => { - import_occurrences.push(namespace.occurrences()) - } - Module::SourceExact(path) => import_paths.push(path), - Module::SourceFuzzy(path) => { - import_occurrences.push(Occurrences::from_path(&path)) - } - }, - Import::Alias { .. } => {} - } - } - } - } - - let identifier_to_lookup = found_external_identifier.unwrap_or(&identifier); - // TODO: update this to be able to return more declarations? Especially if there is the - // ability to quickly filter a large list (based on imports) - let identifier_declarations = index - .declarations_for_identifier::(&identifier_to_lookup); - let declaration_count = identifier_declarations.len(); - - if declaration_count == 0 { - continue; - } - - // TODO: option to filter out other candidates when same file / import match - let mut checked_declarations = Vec::with_capacity(declaration_count); - for (declaration_id, declaration) in identifier_declarations { - match declaration { - Declaration::Buffer { - buffer_id, - declaration: buffer_declaration, - .. 
- } => { - if buffer_id == ¤t_buffer.remote_id() { - let already_included_in_prompt = - range_intersection(&buffer_declaration.item_range, &excerpt.range) - .is_some() - || excerpt - .parent_declarations - .iter() - .any(|(excerpt_parent, _)| excerpt_parent == &declaration_id); - if !options.omit_excerpt_overlaps || !already_included_in_prompt { - let declaration_line = buffer_declaration - .item_range - .start - .to_point(current_buffer) - .row; - let declaration_line_distance = - (cursor_point.row as i32 - declaration_line as i32).unsigned_abs(); - checked_declarations.push(CheckedDeclaration { - declaration, - same_file_line_distance: Some(declaration_line_distance), - path_import_match_count: 0, - wildcard_path_import_match_count: 0, - }); - } - continue; - } else { - } - } - Declaration::File { .. } => {} - } - let declaration_path = declaration.cached_path(); - let path_import_match_count = import_paths - .iter() - .filter(|import_path| { - declaration_path_matches_import(&declaration_path, import_path) - }) - .count(); - let wildcard_path_import_match_count = wildcard_import_paths - .iter() - .filter(|import_path| { - declaration_path_matches_import(&declaration_path, import_path) - }) - .count(); - checked_declarations.push(CheckedDeclaration { - declaration, - same_file_line_distance: None, - path_import_match_count, - wildcard_path_import_match_count, - }); - } - - let mut max_import_similarity = 0.0; - let mut max_wildcard_import_similarity = 0.0; - - let mut scored_declarations_for_identifier = Vec::with_capacity(checked_declarations.len()); - for checked_declaration in checked_declarations { - let same_file_declaration_count = - index.file_declaration_count(checked_declaration.declaration); - - let declaration = score_declaration( - &identifier, - &references, - checked_declaration, - same_file_declaration_count, - declaration_count, - &excerpt_occurrences, - &adjacent_occurrences, - &import_occurrences, - &wildcard_import_occurrences, - cursor_point, - current_buffer, - ); - - if declaration.components.import_similarity > max_import_similarity { - max_import_similarity = declaration.components.import_similarity; - } - - if declaration.components.wildcard_import_similarity > max_wildcard_import_similarity { - max_wildcard_import_similarity = declaration.components.wildcard_import_similarity; - } - - project_entry_id_to_outline_ranges - .entry(declaration.declaration.project_entry_id()) - .or_default() - .push(declaration.declaration.item_range()); - scored_declarations_for_identifier.push(declaration); - } - - if max_import_similarity > 0.0 || max_wildcard_import_similarity > 0.0 { - for declaration in scored_declarations_for_identifier.iter_mut() { - if max_import_similarity > 0.0 { - declaration.components.max_import_similarity = max_import_similarity; - declaration.components.normalized_import_similarity = - declaration.components.import_similarity / max_import_similarity; - } - if max_wildcard_import_similarity > 0.0 { - declaration.components.normalized_wildcard_import_similarity = - declaration.components.wildcard_import_similarity - / max_wildcard_import_similarity; - } - } - } - - scored_declarations.extend(scored_declarations_for_identifier); - } - - // TODO: Inform this via import / retrieval scores of outline items - // TODO: Consider using a sweepline - for scored_declaration in scored_declarations.iter_mut() { - let project_entry_id = scored_declaration.declaration.project_entry_id(); - let Some(ranges) = project_entry_id_to_outline_ranges.get(&project_entry_id) 
else { - continue; - }; - for range in ranges { - if range.contains_inclusive(&scored_declaration.declaration.item_range()) { - scored_declaration.components.included_by_others += 1 - } else if scored_declaration - .declaration - .item_range() - .contains_inclusive(range) - { - scored_declaration.components.includes_others += 1 - } - } - } - - scored_declarations.sort_unstable_by_key(|declaration| { - Reverse(OrderedFloat( - declaration.score(DeclarationStyle::Declaration), - )) - }); - - scored_declarations -} - -struct CheckedDeclaration<'a> { - declaration: &'a Declaration, - same_file_line_distance: Option, - path_import_match_count: usize, - wildcard_path_import_match_count: usize, -} - -fn declaration_path_matches_import( - declaration_path: &CachedDeclarationPath, - import_path: &Arc, -) -> bool { - if import_path.is_absolute() { - declaration_path.equals_absolute_path(import_path) - } else { - declaration_path.ends_with_posix_path(import_path) - } -} - -fn range_intersection(a: &Range, b: &Range) -> Option> { - let start = a.start.clone().max(b.start.clone()); - let end = a.end.clone().min(b.end.clone()); - if start < end { - Some(Range { start, end }) - } else { - None - } -} - -fn score_declaration( - identifier: &Identifier, - references: &[Reference], - checked_declaration: CheckedDeclaration, - same_file_declaration_count: usize, - declaration_count: usize, - excerpt_occurrences: &Occurrences, - adjacent_occurrences: &Occurrences, - import_occurrences: &[Occurrences], - wildcard_import_occurrences: &[Occurrences], - cursor: Point, - current_buffer: &BufferSnapshot, -) -> ScoredDeclaration { - let CheckedDeclaration { - declaration, - same_file_line_distance, - path_import_match_count, - wildcard_path_import_match_count, - } = checked_declaration; - - let is_referenced_nearby = references - .iter() - .any(|r| r.region == ReferenceRegion::Nearby); - let is_referenced_in_breadcrumb = references - .iter() - .any(|r| r.region == ReferenceRegion::Breadcrumb); - let reference_count = references.len(); - let reference_line_distance = references - .iter() - .map(|r| { - let reference_line = r.range.start.to_point(current_buffer).row as i32; - (cursor.row as i32 - reference_line).unsigned_abs() - }) - .min() - .unwrap(); - - let is_same_file = same_file_line_distance.is_some(); - let declaration_line_distance = same_file_line_distance.unwrap_or(u32::MAX); - - let item_source_occurrences = Occurrences::within_string(&declaration.item_text().0); - let item_signature_occurrences = Occurrences::within_string(&declaration.signature_text().0); - let excerpt_vs_item_jaccard = jaccard_similarity(excerpt_occurrences, &item_source_occurrences); - let excerpt_vs_signature_jaccard = - jaccard_similarity(excerpt_occurrences, &item_signature_occurrences); - let adjacent_vs_item_jaccard = - jaccard_similarity(adjacent_occurrences, &item_source_occurrences); - let adjacent_vs_signature_jaccard = - jaccard_similarity(adjacent_occurrences, &item_signature_occurrences); - - let excerpt_vs_item_weighted_overlap = - weighted_overlap_coefficient(excerpt_occurrences, &item_source_occurrences); - let excerpt_vs_signature_weighted_overlap = - weighted_overlap_coefficient(excerpt_occurrences, &item_signature_occurrences); - let adjacent_vs_item_weighted_overlap = - weighted_overlap_coefficient(adjacent_occurrences, &item_source_occurrences); - let adjacent_vs_signature_weighted_overlap = - weighted_overlap_coefficient(adjacent_occurrences, &item_signature_occurrences); - - let mut import_similarity = 0f32; - let 
mut wildcard_import_similarity = 0f32; - if !import_occurrences.is_empty() || !wildcard_import_occurrences.is_empty() { - let cached_path = declaration.cached_path(); - let path_occurrences = Occurrences::from_worktree_path( - cached_path - .worktree_abs_path - .file_name() - .map(|f| f.to_string_lossy()), - &cached_path.rel_path, - ); - import_similarity = import_occurrences - .iter() - .map(|namespace_occurrences| { - OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences)) - }) - .max() - .map(|similarity| similarity.into_inner()) - .unwrap_or_default(); - - // TODO: Consider something other than max - wildcard_import_similarity = wildcard_import_occurrences - .iter() - .map(|namespace_occurrences| { - OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences)) - }) - .max() - .map(|similarity| similarity.into_inner()) - .unwrap_or_default(); - } - - // TODO: Consider adding declaration_file_count - let score_components = DeclarationScoreComponents { - is_same_file, - is_referenced_nearby, - is_referenced_in_breadcrumb, - reference_line_distance, - declaration_line_distance, - reference_count, - same_file_declaration_count, - declaration_count, - excerpt_vs_item_jaccard, - excerpt_vs_signature_jaccard, - adjacent_vs_item_jaccard, - adjacent_vs_signature_jaccard, - excerpt_vs_item_weighted_overlap, - excerpt_vs_signature_weighted_overlap, - adjacent_vs_item_weighted_overlap, - adjacent_vs_signature_weighted_overlap, - path_import_match_count, - wildcard_path_import_match_count, - import_similarity, - max_import_similarity: 0.0, - normalized_import_similarity: 0.0, - wildcard_import_similarity, - normalized_wildcard_import_similarity: 0.0, - included_by_others: 0, - includes_others: 0, - }; - - ScoredDeclaration { - identifier: identifier.clone(), - declaration: declaration.clone(), - components: score_components, - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_declaration_path_matches() { - let declaration_path = - CachedDeclarationPath::new_for_test("/home/user/project", "src/maths.ts"); - - assert!(declaration_path_matches_import( - &declaration_path, - &Path::new("maths.ts").into() - )); - - assert!(declaration_path_matches_import( - &declaration_path, - &Path::new("project/src/maths.ts").into() - )); - - assert!(declaration_path_matches_import( - &declaration_path, - &Path::new("user/project/src/maths.ts").into() - )); - - assert!(declaration_path_matches_import( - &declaration_path, - &Path::new("/home/user/project/src/maths.ts").into() - )); - - assert!(!declaration_path_matches_import( - &declaration_path, - &Path::new("other.ts").into() - )); - - assert!(!declaration_path_matches_import( - &declaration_path, - &Path::new("/home/user/project/src/other.ts").into() - )); - } -} diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 65623a825c2f7e2db42b98174748e5f04fb91d2a..15576a835d9b4b0781b1e3979edbed443fa40f62 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -1,335 +1,474 @@ -mod declaration; -mod declaration_scoring; +use crate::assemble_excerpts::assemble_excerpts; +use anyhow::Result; +use collections::HashMap; +use futures::{FutureExt, StreamExt as _, channel::mpsc, future}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; +use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, 
Point, ToOffset as _}; +use project::{LocationLink, Project, ProjectPath}; +use smallvec::SmallVec; +use std::{ + collections::hash_map, + ops::Range, + path::Path, + sync::Arc, + time::{Duration, Instant}, +}; +use util::{RangeExt as _, ResultExt}; + +mod assemble_excerpts; +#[cfg(test)] +mod edit_prediction_context_tests; mod excerpt; -mod imports; -mod outline; -mod reference; -mod syntax_index; -pub mod text_similarity; +#[cfg(test)] +mod fake_definition_lsp; -use std::{path::Path, sync::Arc}; +pub use cloud_llm_client::predict_edits_v3::Line; +pub use excerpt::{EditPredictionExcerpt, EditPredictionExcerptOptions, EditPredictionExcerptText}; +pub use zeta_prompt::{RelatedExcerpt, RelatedFile}; -use cloud_llm_client::predict_edits_v3; -use collections::HashMap; -use gpui::{App, AppContext as _, Entity, Task}; -use language::BufferSnapshot; -use text::{Point, ToOffset as _}; - -pub use declaration::*; -pub use declaration_scoring::*; -pub use excerpt::*; -pub use imports::*; -pub use reference::*; -pub use syntax_index::*; - -pub use predict_edits_v3::Line; - -#[derive(Clone, Debug, PartialEq)] -pub struct EditPredictionContextOptions { - pub use_imports: bool, - pub excerpt: EditPredictionExcerptOptions, - pub score: EditPredictionScoreOptions, - pub max_retrieved_declarations: u8, +const IDENTIFIER_LINE_COUNT: u32 = 3; + +pub struct RelatedExcerptStore { + project: WeakEntity, + related_files: Arc<[RelatedFile]>, + related_file_buffers: Vec>, + cache: HashMap>, + update_tx: mpsc::UnboundedSender<(Entity, Anchor)>, + identifier_line_count: u32, +} + +pub enum RelatedExcerptStoreEvent { + StartedRefresh, + FinishedRefresh { + cache_hit_count: usize, + cache_miss_count: usize, + mean_definition_latency: Duration, + max_definition_latency: Duration, + }, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +struct Identifier { + pub name: String, + pub range: Range, +} + +enum DefinitionTask { + CacheHit(Arc), + CacheMiss(Task>>>), +} + +#[derive(Debug)] +struct CacheEntry { + definitions: SmallVec<[CachedDefinition; 1]>, } #[derive(Clone, Debug)] -pub struct EditPredictionContext { - pub excerpt: EditPredictionExcerpt, - pub excerpt_text: EditPredictionExcerptText, - pub cursor_point: Point, - pub declarations: Vec, +struct CachedDefinition { + path: ProjectPath, + buffer: Entity, + anchor_range: Range, } -impl EditPredictionContext { - pub fn gather_context_in_background( - cursor_point: Point, - buffer: BufferSnapshot, - options: EditPredictionContextOptions, - syntax_index: Option>, - cx: &mut App, - ) -> Task> { - let parent_abs_path = project::File::from_dyn(buffer.file()).and_then(|f| { - let mut path = f.worktree.read(cx).absolutize(&f.path); - if path.pop() { Some(path) } else { None } - }); +const DEBOUNCE_DURATION: Duration = Duration::from_millis(100); + +impl EventEmitter for RelatedExcerptStore {} + +impl RelatedExcerptStore { + pub fn new(project: &Entity, cx: &mut Context) -> Self { + let (update_tx, mut update_rx) = mpsc::unbounded::<(Entity, Anchor)>(); + cx.spawn(async move |this, cx| { + let executor = cx.background_executor().clone(); + while let Some((mut buffer, mut position)) = update_rx.next().await { + let mut timer = executor.timer(DEBOUNCE_DURATION).fuse(); + loop { + futures::select_biased! 
{ + next = update_rx.next() => { + if let Some((new_buffer, new_position)) = next { + buffer = new_buffer; + position = new_position; + timer = executor.timer(DEBOUNCE_DURATION).fuse(); + } else { + return anyhow::Ok(()); + } + } + _ = timer => break, + } + } - if let Some(syntax_index) = syntax_index { - let index_state = - syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state())); - cx.background_spawn(async move { - let parent_abs_path = parent_abs_path.as_deref(); - let index_state = index_state.upgrade()?; - let index_state = index_state.lock().await; - Self::gather_context( - cursor_point, - &buffer, - parent_abs_path, - &options, - Some(&index_state), - ) - }) - } else { - cx.background_spawn(async move { - let parent_abs_path = parent_abs_path.as_deref(); - Self::gather_context(cursor_point, &buffer, parent_abs_path, &options, None) - }) + Self::fetch_excerpts(this.clone(), buffer, position, cx).await?; + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + + RelatedExcerptStore { + project: project.downgrade(), + update_tx, + related_files: Vec::new().into(), + related_file_buffers: Vec::new(), + cache: Default::default(), + identifier_line_count: IDENTIFIER_LINE_COUNT, } } - pub fn gather_context( - cursor_point: Point, - buffer: &BufferSnapshot, - parent_abs_path: Option<&Path>, - options: &EditPredictionContextOptions, - index_state: Option<&SyntaxIndexState>, - ) -> Option { - let imports = if options.use_imports { - Imports::gather(&buffer, parent_abs_path) - } else { - Imports::default() - }; - Self::gather_context_with_references_fn( - cursor_point, - buffer, - &imports, - options, - index_state, - references_in_excerpt, - ) + pub fn set_identifier_line_count(&mut self, count: u32) { + self.identifier_line_count = count; } - pub fn gather_context_with_references_fn( - cursor_point: Point, - buffer: &BufferSnapshot, - imports: &Imports, - options: &EditPredictionContextOptions, - index_state: Option<&SyntaxIndexState>, - get_references: impl FnOnce( - &EditPredictionExcerpt, - &EditPredictionExcerptText, - &BufferSnapshot, - ) -> HashMap>, - ) -> Option { - let excerpt = EditPredictionExcerpt::select_from_buffer( - cursor_point, - buffer, - &options.excerpt, - index_state, - )?; - let excerpt_text = excerpt.text(buffer); - - let declarations = if options.max_retrieved_declarations > 0 - && let Some(index_state) = index_state - { - let excerpt_occurrences = - text_similarity::Occurrences::within_string(&excerpt_text.body); - - let adjacent_start = Point::new(cursor_point.row.saturating_sub(2), 0); - let adjacent_end = Point::new(cursor_point.row + 1, 0); - let adjacent_occurrences = text_similarity::Occurrences::within_string( - &buffer - .text_for_range(adjacent_start..adjacent_end) - .collect::(), - ); + pub fn refresh(&mut self, buffer: Entity, position: Anchor, _: &mut Context) { + self.update_tx.unbounded_send((buffer, position)).ok(); + } - let cursor_offset_in_file = cursor_point.to_offset(buffer); + pub fn related_files(&self) -> Arc<[RelatedFile]> { + self.related_files.clone() + } - let references = get_references(&excerpt, &excerpt_text, buffer); + pub fn related_files_with_buffers( + &self, + ) -> impl Iterator)> { + self.related_files + .iter() + .cloned() + .zip(self.related_file_buffers.iter().cloned()) + } - let mut declarations = scored_declarations( - &options.score, - &index_state, - &excerpt, - &excerpt_occurrences, - &adjacent_occurrences, - &imports, - references, - cursor_offset_in_file, - buffer, - ); - // TODO [zeta2] if we need this when 
we ship, we should probably do it in a smarter way - declarations.truncate(options.max_retrieved_declarations as usize); - declarations - } else { - vec![] + pub fn set_related_files(&mut self, files: Vec) { + self.related_files = files.into(); + } + + async fn fetch_excerpts( + this: WeakEntity, + buffer: Entity, + position: Anchor, + cx: &mut AsyncApp, + ) -> Result<()> { + let (project, snapshot, identifier_line_count) = this.read_with(cx, |this, cx| { + ( + this.project.upgrade(), + buffer.read(cx).snapshot(), + this.identifier_line_count, + ) + })?; + let Some(project) = project else { + return Ok(()); }; - Some(Self { - excerpt, - excerpt_text, - cursor_point, - declarations, - }) - } -} + let file = snapshot.file().cloned(); + if let Some(file) = &file { + log::debug!("retrieving_context buffer:{}", file.path().as_unix_str()); + } -#[cfg(test)] -mod tests { - use super::*; - use std::sync::Arc; - - use gpui::{Entity, TestAppContext}; - use indoc::indoc; - use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust}; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - - use crate::{EditPredictionExcerptOptions, SyntaxIndex}; - - #[gpui::test] - async fn test_call_site(cx: &mut TestAppContext) { - let (project, index, _rust_lang_id) = init_test(cx).await; - - let buffer = project - .update(cx, |project, cx| { - let project_path = project.find_project_path("c.rs", cx).unwrap(); - project.open_buffer(project_path, cx) - }) - .await - .unwrap(); - - cx.run_until_parked(); - - // first process_data call site - let cursor_point = language::Point::new(8, 21); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let context = cx - .update(|cx| { - EditPredictionContext::gather_context_in_background( - cursor_point, - buffer_snapshot, - EditPredictionContextOptions { - use_imports: true, - excerpt: EditPredictionExcerptOptions { - max_bytes: 60, - min_bytes: 10, - target_before_cursor_over_total_bytes: 0.5, - }, - score: EditPredictionScoreOptions { - omit_excerpt_overlaps: true, - }, - max_retrieved_declarations: u8::MAX, - }, - Some(index.clone()), - cx, - ) + this.update(cx, |_, cx| { + cx.emit(RelatedExcerptStoreEvent::StartedRefresh); + })?; + + let identifiers = cx + .background_spawn(async move { + identifiers_for_position(&snapshot, position, identifier_line_count) }) - .await - .unwrap(); + .await; + + let async_cx = cx.clone(); + let start_time = Instant::now(); + let futures = this.update(cx, |this, cx| { + identifiers + .into_iter() + .filter_map(|identifier| { + let task = if let Some(entry) = this.cache.get(&identifier) { + DefinitionTask::CacheHit(entry.clone()) + } else { + DefinitionTask::CacheMiss( + this.project + .update(cx, |project, cx| { + project.definitions(&buffer, identifier.range.start, cx) + }) + .ok()?, + ) + }; + + let cx = async_cx.clone(); + let project = project.clone(); + Some(async move { + match task { + DefinitionTask::CacheHit(cache_entry) => { + Some((identifier, cache_entry, None)) + } + DefinitionTask::CacheMiss(task) => { + let locations = task.await.log_err()??; + let duration = start_time.elapsed(); + cx.update(|cx| { + ( + identifier, + Arc::new(CacheEntry { + definitions: locations + .into_iter() + .filter_map(|location| { + process_definition(location, &project, cx) + }) + .collect(), + }), + Some(duration), + ) + }) + .ok() + } + } + }) + }) + .collect::>() + })?; + + let mut cache_hit_count = 0; + let mut cache_miss_count = 0; + let 
mut mean_definition_latency = Duration::ZERO; + let mut max_definition_latency = Duration::ZERO; + let mut new_cache = HashMap::default(); + new_cache.reserve(futures.len()); + for (identifier, entry, duration) in future::join_all(futures).await.into_iter().flatten() { + new_cache.insert(identifier, entry); + if let Some(duration) = duration { + cache_miss_count += 1; + mean_definition_latency += duration; + max_definition_latency = max_definition_latency.max(duration); + } else { + cache_hit_count += 1; + } + } + mean_definition_latency /= cache_miss_count.max(1) as u32; - let mut snippet_identifiers = context - .declarations - .iter() - .map(|snippet| snippet.identifier.name.as_ref()) - .collect::>(); - snippet_identifiers.sort(); - assert_eq!(snippet_identifiers, vec!["main", "process_data"]); - drop(buffer); - } + let (new_cache, related_files, related_file_buffers) = + rebuild_related_files(&project, new_cache, cx).await?; - async fn init_test( - cx: &mut TestAppContext, - ) -> (Entity, Entity, LanguageId) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); + if let Some(file) = &file { + log::debug!( + "finished retrieving context buffer:{}, latency:{:?}", + file.path().as_unix_str(), + start_time.elapsed() + ); + } - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "a.rs": indoc! {r#" - fn main() { - let x = 1; - let y = 2; - let z = add(x, y); - println!("Result: {}", z); - } + this.update(cx, |this, cx| { + this.cache = new_cache; + this.related_files = related_files.into(); + this.related_file_buffers = related_file_buffers; + cx.emit(RelatedExcerptStoreEvent::FinishedRefresh { + cache_hit_count, + cache_miss_count, + mean_definition_latency, + max_definition_latency, + }); + })?; + + anyhow::Ok(()) + } +} - fn add(a: i32, b: i32) -> i32 { - a + b - } - "#}, - "b.rs": indoc! {" - pub struct Config { - pub name: String, - pub value: i32, +async fn rebuild_related_files( + project: &Entity, + new_entries: HashMap>, + cx: &mut AsyncApp, +) -> Result<( + HashMap>, + Vec, + Vec>, +)> { + let mut snapshots = HashMap::default(); + let mut worktree_root_names = HashMap::default(); + for entry in new_entries.values() { + for definition in &entry.definitions { + if let hash_map::Entry::Vacant(e) = snapshots.entry(definition.buffer.entity_id()) { + definition + .buffer + .read_with(cx, |buffer, _| buffer.parsing_idle())? + .await; + e.insert( + definition + .buffer + .read_with(cx, |buffer, _| buffer.snapshot())?, + ); + } + let worktree_id = definition.path.worktree_id; + if let hash_map::Entry::Vacant(e) = + worktree_root_names.entry(definition.path.worktree_id) + { + project.read_with(cx, |project, cx| { + if let Some(worktree) = project.worktree_for_id(worktree_id, cx) { + e.insert(worktree.read(cx).root_name().as_unix_str().to_string()); } + })?; + } + } + } - impl Config { - pub fn new(name: String, value: i32) -> Self { - Config { name, value } - } - } - "}, - "c.rs": indoc! {r#" - use std::collections::HashMap; - - fn main() { - let args: Vec = std::env::args().collect(); - let data: Vec = args[1..] 
- .iter() - .filter_map(|s| s.parse().ok()) - .collect(); - let result = process_data(data); - println!("{:?}", result); - } + Ok(cx + .background_spawn(async move { + let mut files = Vec::new(); + let mut ranges_by_buffer = HashMap::<_, Vec>>::default(); + let mut paths_by_buffer = HashMap::default(); + for entry in new_entries.values() { + for definition in &entry.definitions { + let Some(snapshot) = snapshots.get(&definition.buffer.entity_id()) else { + continue; + }; + paths_by_buffer.insert(definition.buffer.entity_id(), definition.path.clone()); + ranges_by_buffer + .entry(definition.buffer.clone()) + .or_default() + .push(definition.anchor_range.to_point(snapshot)); + } + } + + for (buffer, ranges) in ranges_by_buffer { + let Some(snapshot) = snapshots.get(&buffer.entity_id()) else { + continue; + }; + let Some(project_path) = paths_by_buffer.get(&buffer.entity_id()) else { + continue; + }; + let excerpts = assemble_excerpts(snapshot, ranges); + let Some(root_name) = worktree_root_names.get(&project_path.worktree_id) else { + continue; + }; + + let path = Path::new(&format!( + "{}/{}", + root_name, + project_path.path.as_unix_str() + )) + .into(); + + files.push(( + buffer, + RelatedFile { + path, + excerpts, + max_row: snapshot.max_point().row, + }, + )); + } - fn process_data(data: Vec) -> HashMap { - let mut counts = HashMap::new(); - for value in data { - *counts.entry(value).or_insert(0) += 1; - } - counts - } + files.sort_by_key(|(_, file)| file.path.clone()); + let (related_buffers, related_files) = files.into_iter().unzip(); - #[cfg(test)] - mod tests { - use super::*; + (new_entries, related_files, related_buffers) + }) + .await) +} - #[test] - fn test_process_data() { - let data = vec![1, 2, 2, 3]; - let result = process_data(data); - assert_eq!(result.get(&2), Some(&2)); - } - } - "#} - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let lang = rust_lang(); - let lang_id = lang.id(); - language_registry.add(Arc::new(lang)); - - let file_indexing_parallelism = 2; - let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx)); - cx.run_until_parked(); - - (project, index, lang_id) +const MAX_TARGET_LEN: usize = 128; + +fn process_definition( + location: LocationLink, + project: &Entity, + cx: &mut App, +) -> Option { + let buffer = location.target.buffer.read(cx); + let anchor_range = location.target.range; + let file = buffer.file()?; + let worktree = project.read(cx).worktree_for_id(file.worktree_id(cx), cx)?; + if worktree.read(cx).is_single_file() { + return None; } - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_highlights_query(include_str!("../../languages/src/rust/highlights.scm")) - .unwrap() - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() + // If the target range is large, it likely means we requested the definition of an entire module. + // For individual definitions, the target range should be small as it only covers the symbol. 
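+    // For example, go-to-definition on a struct or function name usually targets just the
+    // identifier (a few bytes), while resolving an import of a whole module can target a range
+    // spanning the entire file; the MAX_TARGET_LEN check below drops the latter kind of result.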
+ let buffer = location.target.buffer.read(cx); + let target_len = anchor_range.to_offset(&buffer).len(); + if target_len > MAX_TARGET_LEN { + return None; } + + Some(CachedDefinition { + path: ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path().clone(), + }, + buffer: location.target.buffer, + anchor_range, + }) +} + +/// Gets all of the identifiers that are present in the given line, and its containing +/// outline items. +fn identifiers_for_position( + buffer: &BufferSnapshot, + position: Anchor, + identifier_line_count: u32, +) -> Vec { + let offset = position.to_offset(buffer); + let point = buffer.offset_to_point(offset); + + // Search for identifiers on lines adjacent to the cursor. + let start = Point::new(point.row.saturating_sub(identifier_line_count), 0); + let end = Point::new(point.row + identifier_line_count + 1, 0).min(buffer.max_point()); + let line_range = start..end; + let mut ranges = vec![line_range.to_offset(&buffer)]; + + // Search for identifiers mentioned in headers/signatures of containing outline items. + let outline_items = buffer.outline_items_as_offsets_containing(offset..offset, false, None); + for item in outline_items { + if let Some(body_range) = item.body_range(&buffer) { + ranges.push(item.range.start..body_range.start.to_offset(&buffer)); + } else { + ranges.push(item.range.clone()); + } + } + + ranges.sort_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end))); + ranges.dedup_by(|a, b| { + if a.start <= b.end { + b.start = b.start.min(a.start); + b.end = b.end.max(a.end); + true + } else { + false + } + }); + + let mut identifiers = Vec::new(); + let outer_range = + ranges.first().map_or(0, |r| r.start)..ranges.last().map_or(buffer.len(), |r| r.end); + + let mut captures = buffer + .syntax + .captures(outer_range.clone(), &buffer.text, |grammar| { + grammar + .highlights_config + .as_ref() + .map(|config| &config.query) + }); + + for range in ranges { + captures.set_byte_range(range.start..outer_range.end); + + let mut last_range = None; + while let Some(capture) = captures.peek() { + let node_range = capture.node.byte_range(); + if node_range.start > range.end { + break; + } + let config = captures.grammars()[capture.grammar_index] + .highlights_config + .as_ref(); + + if let Some(config) = config + && config.identifier_capture_indices.contains(&capture.index) + && range.contains_inclusive(&node_range) + && Some(&node_range) != last_range.as_ref() + { + let name = buffer.text_for_range(node_range.clone()).collect(); + identifiers.push(Identifier { + range: buffer.anchor_after(node_range.start) + ..buffer.anchor_before(node_range.end), + name, + }); + last_range = Some(node_range); + } + + captures.advance(); + } + } + + identifiers } diff --git a/crates/edit_prediction_context/src/edit_prediction_context_tests.rs b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..d93a66081164a3fc70f7e1072d91a02bd9adbd37 --- /dev/null +++ b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs @@ -0,0 +1,510 @@ +use super::*; +use futures::channel::mpsc::UnboundedReceiver; +use gpui::TestAppContext; +use indoc::indoc; +use language::{Point, ToPoint as _, rust_lang}; +use lsp::FakeLanguageServer; +use project::{FakeFs, LocationLink, Project}; +use serde_json::json; +use settings::SettingsStore; +use std::fmt::Write as _; +use util::{path, test::marked_text_ranges}; + +#[gpui::test] +async fn test_edit_prediction_context(cx: &mut TestAppContext) { + 
init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/root"), test_project_1()).await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let mut servers = setup_fake_lsp(&project, cx); + + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + let _server = servers.next().await.unwrap(); + cx.run_until_parked(); + + let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(&project, cx)); + related_excerpt_store.update(cx, |store, cx| { + let position = { + let buffer = buffer.read(cx); + let offset = buffer.text().find("todo").unwrap(); + buffer.anchor_before(offset) + }; + + store.set_identifier_line_count(0); + store.refresh(buffer.clone(), position, cx); + }); + + cx.executor().advance_clock(DEBOUNCE_DURATION); + related_excerpt_store.update(cx, |store, _| { + let excerpts = store.related_files(); + assert_related_files( + &excerpts, + &[ + ( + "root/src/company.rs", + &[indoc! {" + pub struct Company { + owner: Arc, + address: Address, + }"}], + ), + ( + "root/src/main.rs", + &[ + indoc! {" + pub struct Session { + company: Arc, + } + + impl Session { + pub fn set_company(&mut self, company: Arc) {"}, + indoc! {" + } + }"}, + ], + ), + ( + "root/src/person.rs", + &[ + indoc! {" + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + }"}, + "}", + ], + ), + ], + ); + }); +} + +#[gpui::test] +fn test_assemble_excerpts(cx: &mut TestAppContext) { + let table = [ + ( + indoc! {r#" + struct User { + first_name: String, + «last_name»: String, + age: u32, + email: String, + create_at: Instant, + } + + impl User { + pub fn first_name(&self) -> String { + self.first_name.clone() + } + + pub fn full_name(&self) -> String { + « format!("{} {}", self.first_name, self.last_name) + » } + } + "#}, + indoc! {r#" + struct User { + first_name: String, + last_name: String, + … + } + + impl User { + … + pub fn full_name(&self) -> String { + format!("{} {}", self.first_name, self.last_name) + } + } + "#}, + ), + ( + indoc! {r#" + struct «User» { + first_name: String, + last_name: String, + age: u32, + } + + impl User { + // methods + } + "#}, + indoc! {r#" + struct User { + first_name: String, + last_name: String, + age: u32, + } + … + "#}, + ), + ( + indoc! {r#" + trait «FooProvider» { + const NAME: &'static str; + + fn provide_foo(&self, id: usize) -> Foo; + + fn provide_foo_batched(&self, ids: &[usize]) -> Vec { + ids.iter() + .map(|id| self.provide_foo(*id)) + .collect() + } + + fn sync(&self); + } + "# + }, + indoc! {r#" + trait FooProvider { + const NAME: &'static str; + + fn provide_foo(&self, id: usize) -> Foo; + + fn provide_foo_batched(&self, ids: &[usize]) -> Vec { + … + } + + fn sync(&self); + } + "#}, + ), + ( + indoc! {r#" + trait «Something» { + fn method1(&self, id: usize) -> Foo; + + fn method2(&self, ids: &[usize]) -> Vec { + struct Helper1 { + field1: usize, + } + + struct Helper2 { + field2: usize, + } + + struct Helper3 { + filed2: usize, + } + } + + fn sync(&self); + } + "# + }, + indoc! 
{r#" + trait Something { + fn method1(&self, id: usize) -> Foo; + + fn method2(&self, ids: &[usize]) -> Vec { + … + } + + fn sync(&self); + } + "#}, + ), + ]; + + for (input, expected_output) in table { + let (input, ranges) = marked_text_ranges(&input, false); + let buffer = cx.new(|cx| Buffer::local(input, cx).with_language(rust_lang(), cx)); + buffer.read_with(cx, |buffer, _cx| { + let ranges: Vec> = ranges + .into_iter() + .map(|range| range.to_point(&buffer)) + .collect(); + + let excerpts = assemble_excerpts(&buffer.snapshot(), ranges); + + let output = format_excerpts(buffer, &excerpts); + assert_eq!(output, expected_output); + }); + } +} + +#[gpui::test] +async fn test_fake_definition_lsp(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/root"), test_project_1()).await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let mut servers = setup_fake_lsp(&project, cx); + + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + let _server = servers.next().await.unwrap(); + cx.run_until_parked(); + + let buffer_text = buffer.read_with(cx, |buffer, _| buffer.text()); + + let definitions = project + .update(cx, |project, cx| { + let offset = buffer_text.find("Address {").unwrap(); + project.definitions(&buffer, offset, cx) + }) + .await + .unwrap() + .unwrap(); + assert_definitions(&definitions, &["pub struct Address {"], cx); + + let definitions = project + .update(cx, |project, cx| { + let offset = buffer_text.find("State::CA").unwrap(); + project.definitions(&buffer, offset, cx) + }) + .await + .unwrap() + .unwrap(); + assert_definitions(&definitions, &["pub enum State {"], cx); + + let definitions = project + .update(cx, |project, cx| { + let offset = buffer_text.find("to_string()").unwrap(); + project.definitions(&buffer, offset, cx) + }) + .await + .unwrap() + .unwrap(); + assert_definitions(&definitions, &["pub fn to_string(&self) -> String {"], cx); +} + +fn init_test(cx: &mut TestAppContext) { + let settings_store = cx.update(|cx| SettingsStore::test(cx)); + cx.set_global(settings_store); + env_logger::try_init().ok(); +} + +fn setup_fake_lsp( + project: &Entity, + cx: &mut TestAppContext, +) -> UnboundedReceiver { + let (language_registry, fs) = project.read_with(cx, |project, _| { + (project.languages().clone(), project.fs().clone()) + }); + let language = rust_lang(); + language_registry.add(language.clone()); + fake_definition_lsp::register_fake_definition_server(&language_registry, language, fs) +} + +fn test_project_1() -> serde_json::Value { + let person_rs = indoc! {r#" + pub struct Person { + first_name: String, + last_name: String, + email: String, + age: u32, + } + + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + } + + pub fn get_last_name(&self) -> &str { + &self.last_name + } + + pub fn get_email(&self) -> &str { + &self.email + } + + pub fn get_age(&self) -> u32 { + self.age + } + } + "#}; + + let address_rs = indoc! {r#" + pub struct Address { + street: String, + city: String, + state: State, + zip: u32, + } + + pub enum State { + CA, + OR, + WA, + TX, + // ... + } + + impl Address { + pub fn get_street(&self) -> &str { + &self.street + } + + pub fn get_city(&self) -> &str { + &self.city + } + + pub fn get_state(&self) -> State { + self.state + } + + pub fn get_zip(&self) -> u32 { + self.zip + } + } + "#}; + + let company_rs = indoc! 
{r#" + use super::person::Person; + use super::address::Address; + + pub struct Company { + owner: Arc, + address: Address, + } + + impl Company { + pub fn get_owner(&self) -> &Person { + &self.owner + } + + pub fn get_address(&self) -> &Address { + &self.address + } + + pub fn to_string(&self) -> String { + format!("{} ({})", self.owner.first_name, self.address.city) + } + } + "#}; + + let main_rs = indoc! {r#" + use std::sync::Arc; + use super::person::Person; + use super::address::Address; + use super::company::Company; + + pub struct Session { + company: Arc, + } + + impl Session { + pub fn set_company(&mut self, company: Arc) { + self.company = company; + if company.owner != self.company.owner { + log("new owner", company.owner.get_first_name()); todo(); + } + } + } + + fn main() { + let company = Company { + owner: Arc::new(Person { + first_name: "John".to_string(), + last_name: "Doe".to_string(), + email: "john@example.com".to_string(), + age: 30, + }), + address: Address { + street: "123 Main St".to_string(), + city: "Anytown".to_string(), + state: State::CA, + zip: 12345, + }, + }; + + println!("Company: {}", company.to_string()); + } + "#}; + + json!({ + "src": { + "person.rs": person_rs, + "address.rs": address_rs, + "company.rs": company_rs, + "main.rs": main_rs, + }, + }) +} + +fn assert_related_files(actual_files: &[RelatedFile], expected_files: &[(&str, &[&str])]) { + let actual_files = actual_files + .iter() + .map(|file| { + let excerpts = file + .excerpts + .iter() + .map(|excerpt| excerpt.text.to_string()) + .collect::>(); + (file.path.to_str().unwrap(), excerpts) + }) + .collect::>(); + let expected_excerpts = expected_files + .iter() + .map(|(path, texts)| { + ( + *path, + texts + .iter() + .map(|line| line.to_string()) + .collect::>(), + ) + }) + .collect::>(); + pretty_assertions::assert_eq!(actual_files, expected_excerpts) +} + +fn assert_definitions(definitions: &[LocationLink], first_lines: &[&str], cx: &mut TestAppContext) { + let actual_first_lines = definitions + .iter() + .map(|definition| { + definition.target.buffer.read_with(cx, |buffer, _| { + let mut start = definition.target.range.start.to_point(&buffer); + start.column = 0; + let end = Point::new(start.row, buffer.line_len(start.row)); + buffer + .text_for_range(start..end) + .collect::() + .trim() + .to_string() + }) + }) + .collect::>(); + + assert_eq!(actual_first_lines, first_lines); +} + +fn format_excerpts(buffer: &Buffer, excerpts: &[RelatedExcerpt]) -> String { + let mut output = String::new(); + let file_line_count = buffer.max_point().row; + let mut current_row = 0; + for excerpt in excerpts { + if excerpt.text.is_empty() { + continue; + } + if current_row < excerpt.row_range.start { + writeln!(&mut output, "…").unwrap(); + } + current_row = excerpt.row_range.start; + + for line in excerpt.text.to_string().lines() { + output.push_str(line); + output.push('\n'); + current_row += 1; + } + } + if current_row < file_line_count { + writeln!(&mut output, "…").unwrap(); + } + output +} diff --git a/crates/edit_prediction_context/src/excerpt.rs b/crates/edit_prediction_context/src/excerpt.rs index 7a4bb73edfa131b620a930d7f0e1c0da77e0afe6..3fc7eed4ace5a83992bf496aef3e364aea96e215 100644 --- a/crates/edit_prediction_context/src/excerpt.rs +++ b/crates/edit_prediction_context/src/excerpt.rs @@ -1,11 +1,9 @@ -use language::{BufferSnapshot, LanguageId}; +use cloud_llm_client::predict_edits_v3::Line; +use language::{BufferSnapshot, LanguageId, Point, ToOffset as _, ToPoint as _}; use std::ops::Range; -use 
text::{Point, ToOffset as _, ToPoint as _}; use tree_sitter::{Node, TreeCursor}; use util::RangeExt; -use crate::{BufferDeclaration, Line, declaration::DeclarationId, syntax_index::SyntaxIndexState}; - // TODO: // // - Test parent signatures @@ -31,19 +29,16 @@ pub struct EditPredictionExcerptOptions { pub target_before_cursor_over_total_bytes: f32, } -// TODO: consider merging these #[derive(Debug, Clone)] pub struct EditPredictionExcerpt { pub range: Range, pub line_range: Range, - pub parent_declarations: Vec<(DeclarationId, Range)>, pub size: usize, } #[derive(Debug, Clone)] pub struct EditPredictionExcerptText { pub body: String, - pub parent_signatures: Vec, pub language_id: Option, } @@ -52,17 +47,8 @@ impl EditPredictionExcerpt { let body = buffer .text_for_range(self.range.clone()) .collect::(); - let parent_signatures = self - .parent_declarations - .iter() - .map(|(_, range)| buffer.text_for_range(range.clone()).collect::()) - .collect(); let language_id = buffer.language().map(|l| l.id()); - EditPredictionExcerptText { - body, - parent_signatures, - language_id, - } + EditPredictionExcerptText { body, language_id } } /// Selects an excerpt around a buffer position, attempting to choose logical boundaries based @@ -79,7 +65,6 @@ impl EditPredictionExcerpt { query_point: Point, buffer: &BufferSnapshot, options: &EditPredictionExcerptOptions, - syntax_index: Option<&SyntaxIndexState>, ) -> Option { if buffer.len() <= options.max_bytes { log::debug!( @@ -89,11 +74,7 @@ impl EditPredictionExcerpt { ); let offset_range = 0..buffer.len(); let line_range = Line(0)..Line(buffer.max_point().row); - return Some(EditPredictionExcerpt::new( - offset_range, - line_range, - Vec::new(), - )); + return Some(EditPredictionExcerpt::new(offset_range, line_range)); } let query_offset = query_point.to_offset(buffer); @@ -104,19 +85,10 @@ impl EditPredictionExcerpt { return None; } - let parent_declarations = if let Some(syntax_index) = syntax_index { - syntax_index - .buffer_declarations_containing_range(buffer.remote_id(), query_range.clone()) - .collect() - } else { - Vec::new() - }; - let excerpt_selector = ExcerptSelector { query_offset, query_range, query_line_range: Line(query_line_range.start)..Line(query_line_range.end), - parent_declarations: &parent_declarations, buffer, options, }; @@ -139,20 +111,10 @@ impl EditPredictionExcerpt { excerpt_selector.select_lines() } - fn new( - range: Range, - line_range: Range, - parent_declarations: Vec<(DeclarationId, Range)>, - ) -> Self { - let size = range.len() - + parent_declarations - .iter() - .map(|(_, range)| range.len()) - .sum::(); + fn new(range: Range, line_range: Range) -> Self { Self { + size: range.len(), range, - parent_declarations, - size, line_range, } } @@ -162,14 +124,7 @@ impl EditPredictionExcerpt { // this is an issue because parent_signature_ranges may be incorrect log::error!("bug: with_expanded_range called with disjoint range"); } - let mut parent_declarations = Vec::with_capacity(self.parent_declarations.len()); - for (declaration_id, range) in &self.parent_declarations { - if !range.contains_inclusive(&new_range) { - break; - } - parent_declarations.push((*declaration_id, range.clone())); - } - Self::new(new_range, new_line_range, parent_declarations) + Self::new(new_range, new_line_range) } fn parent_signatures_size(&self) -> usize { @@ -181,7 +136,6 @@ struct ExcerptSelector<'a> { query_offset: usize, query_range: Range, query_line_range: Range, - parent_declarations: &'a [(DeclarationId, &'a BufferDeclaration)], buffer: 
&'a BufferSnapshot, options: &'a EditPredictionExcerptOptions, } @@ -409,13 +363,7 @@ impl<'a> ExcerptSelector<'a> { } fn make_excerpt(&self, range: Range, line_range: Range) -> EditPredictionExcerpt { - let parent_declarations = self - .parent_declarations - .iter() - .filter(|(_, declaration)| declaration.item_range.contains_inclusive(&range)) - .map(|(id, declaration)| (*id, declaration.signature_range.clone())) - .collect(); - EditPredictionExcerpt::new(range, line_range, parent_declarations) + EditPredictionExcerpt::new(range, line_range) } /// Returns `true` if the `forward` excerpt is a better choice than the `backward` excerpt. @@ -471,30 +419,14 @@ fn node_line_end(node: Node) -> Point { mod tests { use super::*; use gpui::{AppContext, TestAppContext}; - use language::{Buffer, Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; + use language::Buffer; use util::test::{generate_marked_text, marked_text_offsets_by}; fn create_buffer(text: &str, cx: &mut TestAppContext) -> BufferSnapshot { - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang().into(), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(language::rust_lang(), cx)); buffer.read_with(cx, |buffer, _| buffer.snapshot()) } - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } - fn cursor_and_excerpt_range(text: &str) -> (String, usize, Range) { let (text, offsets) = marked_text_offsets_by(text, vec!['ˇ', '«', '»']); (text, offsets[&'ˇ'][0], offsets[&'«'][0]..offsets[&'»'][0]) @@ -506,9 +438,8 @@ mod tests { let buffer = create_buffer(&text, cx); let cursor_point = cursor.to_point(&buffer); - let excerpt = - EditPredictionExcerpt::select_from_buffer(cursor_point, &buffer, &options, None) - .expect("Should select an excerpt"); + let excerpt = EditPredictionExcerpt::select_from_buffer(cursor_point, &buffer, &options) + .expect("Should select an excerpt"); pretty_assertions::assert_eq!( generate_marked_text(&text, std::slice::from_ref(&excerpt.range), false), generate_marked_text(&text, &[expected_excerpt], false) diff --git a/crates/edit_prediction_context/src/fake_definition_lsp.rs b/crates/edit_prediction_context/src/fake_definition_lsp.rs new file mode 100644 index 0000000000000000000000000000000000000000..31fb681309c610a37c7f886390ef5adb92ee78ef --- /dev/null +++ b/crates/edit_prediction_context/src/fake_definition_lsp.rs @@ -0,0 +1,329 @@ +use collections::HashMap; +use futures::channel::mpsc::UnboundedReceiver; +use language::{Language, LanguageRegistry}; +use lsp::{ + FakeLanguageServer, LanguageServerBinary, TextDocumentSyncCapability, TextDocumentSyncKind, Uri, +}; +use parking_lot::Mutex; +use project::Fs; +use std::{ops::Range, path::PathBuf, sync::Arc}; +use tree_sitter::{Parser, QueryCursor, StreamingIterator, Tree}; + +/// Registers a fake language server that implements go-to-definition using tree-sitter, +/// making the assumption that all names are unique, and all variables' types are +/// explicitly declared. 
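+///
+/// A minimal test-side sketch of wiring it up (mirroring `setup_fake_lsp` in the tests in this
+/// crate):
+///
+/// ```ignore
+/// let (language_registry, fs) = project.read_with(cx, |project, _| {
+///     (project.languages().clone(), project.fs().clone())
+/// });
+/// let language = rust_lang();
+/// language_registry.add(language.clone());
+/// let mut servers = register_fake_definition_server(&language_registry, language, fs);
+/// // Each fake server instance arrives on the returned channel as buffers are opened.
+/// ```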
+pub fn register_fake_definition_server( + language_registry: &Arc, + language: Arc, + fs: Arc, +) -> UnboundedReceiver { + let index = Arc::new(Mutex::new(DefinitionIndex::new(language.clone()))); + + language_registry.register_fake_lsp( + language.name(), + language::FakeLspAdapter { + name: "fake-definition-lsp", + initialization_options: None, + prettier_plugins: Vec::new(), + disk_based_diagnostics_progress_token: None, + disk_based_diagnostics_sources: Vec::new(), + language_server_binary: LanguageServerBinary { + path: PathBuf::from("fake-definition-lsp"), + arguments: Vec::new(), + env: None, + }, + capabilities: lsp::ServerCapabilities { + definition_provider: Some(lsp::OneOf::Left(true)), + text_document_sync: Some(TextDocumentSyncCapability::Kind( + TextDocumentSyncKind::FULL, + )), + ..Default::default() + }, + label_for_completion: None, + initializer: Some(Box::new({ + move |server| { + server.handle_notification::({ + let index = index.clone(); + move |params, _cx| { + index + .lock() + .open_buffer(params.text_document.uri, ¶ms.text_document.text); + } + }); + + server.handle_notification::({ + let index = index.clone(); + let fs = fs.clone(); + move |params, cx| { + let uri = params.text_document.uri; + let path = uri.to_file_path().ok(); + index.lock().mark_buffer_closed(&uri); + + if let Some(path) = path { + let index = index.clone(); + let fs = fs.clone(); + cx.spawn(async move |_cx| { + if let Ok(content) = fs.load(&path).await { + index.lock().index_file(uri, &content); + } + }) + .detach(); + } + } + }); + + server.handle_notification::({ + let index = index.clone(); + let fs = fs.clone(); + move |params, cx| { + let index = index.clone(); + let fs = fs.clone(); + cx.spawn(async move |_cx| { + for event in params.changes { + if index.lock().is_buffer_open(&event.uri) { + continue; + } + + match event.typ { + lsp::FileChangeType::DELETED => { + index.lock().remove_definitions_for_file(&event.uri); + } + lsp::FileChangeType::CREATED + | lsp::FileChangeType::CHANGED => { + if let Some(path) = event.uri.to_file_path().ok() { + if let Ok(content) = fs.load(&path).await { + index.lock().index_file(event.uri, &content); + } + } + } + _ => {} + } + } + }) + .detach(); + } + }); + + server.handle_notification::({ + let index = index.clone(); + move |params, _cx| { + if let Some(change) = params.content_changes.into_iter().last() { + index + .lock() + .index_file(params.text_document.uri, &change.text); + } + } + }); + + server.handle_notification::( + { + let index = index.clone(); + let fs = fs.clone(); + move |params, cx| { + let index = index.clone(); + let fs = fs.clone(); + let files = fs.as_fake().files(); + cx.spawn(async move |_cx| { + for folder in params.event.added { + let Ok(path) = folder.uri.to_file_path() else { + continue; + }; + for file in &files { + if let Some(uri) = Uri::from_file_path(&file).ok() + && file.starts_with(&path) + && let Ok(content) = fs.load(&file).await + { + index.lock().index_file(uri, &content); + } + } + } + }) + .detach(); + } + }, + ); + + server.set_request_handler::({ + let index = index.clone(); + move |params, _cx| { + let result = index.lock().get_definitions( + params.text_document_position_params.text_document.uri, + params.text_document_position_params.position, + ); + async move { Ok(result) } + } + }); + } + })), + }, + ) +} + +struct DefinitionIndex { + language: Arc, + definitions: HashMap>, + files: HashMap, +} + +#[derive(Debug)] +struct FileEntry { + contents: String, + is_open_in_buffer: bool, +} + +impl 
DefinitionIndex { + fn new(language: Arc) -> Self { + Self { + language, + definitions: HashMap::default(), + files: HashMap::default(), + } + } + + fn remove_definitions_for_file(&mut self, uri: &Uri) { + self.definitions.retain(|_, locations| { + locations.retain(|loc| &loc.uri != uri); + !locations.is_empty() + }); + self.files.remove(uri); + } + + fn open_buffer(&mut self, uri: Uri, content: &str) { + self.index_file_inner(uri, content, true); + } + + fn mark_buffer_closed(&mut self, uri: &Uri) { + if let Some(entry) = self.files.get_mut(uri) { + entry.is_open_in_buffer = false; + } + } + + fn is_buffer_open(&self, uri: &Uri) -> bool { + self.files + .get(uri) + .map(|entry| entry.is_open_in_buffer) + .unwrap_or(false) + } + + fn index_file(&mut self, uri: Uri, content: &str) { + self.index_file_inner(uri, content, false); + } + + fn index_file_inner(&mut self, uri: Uri, content: &str, is_open_in_buffer: bool) -> Option<()> { + self.remove_definitions_for_file(&uri); + let grammar = self.language.grammar()?; + let outline_config = grammar.outline_config.as_ref()?; + let mut parser = Parser::new(); + parser.set_language(&grammar.ts_language).ok()?; + let tree = parser.parse(content, None)?; + let declarations = extract_declarations_from_tree(&tree, content, outline_config); + for (name, byte_range) in declarations { + let range = byte_range_to_lsp_range(content, byte_range); + let location = lsp::Location { + uri: uri.clone(), + range, + }; + self.definitions + .entry(name) + .or_insert_with(Vec::new) + .push(location); + } + self.files.insert( + uri, + FileEntry { + contents: content.to_string(), + is_open_in_buffer, + }, + ); + + Some(()) + } + + fn get_definitions( + &mut self, + uri: Uri, + position: lsp::Position, + ) -> Option { + let entry = self.files.get(&uri)?; + let name = word_at_position(&entry.contents, position)?; + let locations = self.definitions.get(name).cloned()?; + Some(lsp::GotoDefinitionResponse::Array(locations)) + } +} + +fn extract_declarations_from_tree( + tree: &Tree, + content: &str, + outline_config: &language::OutlineConfig, +) -> Vec<(String, Range)> { + let mut cursor = QueryCursor::new(); + let mut declarations = Vec::new(); + let mut matches = cursor.matches(&outline_config.query, tree.root_node(), content.as_bytes()); + while let Some(query_match) = matches.next() { + let mut name_range: Option> = None; + let mut has_item_range = false; + + for capture in query_match.captures { + let range = capture.node.byte_range(); + if capture.index == outline_config.name_capture_ix { + name_range = Some(range); + } else if capture.index == outline_config.item_capture_ix { + has_item_range = true; + } + } + + if let Some(name_range) = name_range + && has_item_range + { + let name = content[name_range.clone()].to_string(); + if declarations.iter().any(|(n, _)| n == &name) { + continue; + } + declarations.push((name, name_range)); + } + } + declarations +} + +fn byte_range_to_lsp_range(content: &str, byte_range: Range) -> lsp::Range { + let start = byte_offset_to_position(content, byte_range.start); + let end = byte_offset_to_position(content, byte_range.end); + lsp::Range { start, end } +} + +fn byte_offset_to_position(content: &str, offset: usize) -> lsp::Position { + let mut line = 0; + let mut character = 0; + let mut current_offset = 0; + for ch in content.chars() { + if current_offset >= offset { + break; + } + if ch == '\n' { + line += 1; + character = 0; + } else { + character += 1; + } + current_offset += ch.len_utf8(); + } + lsp::Position { line, character 
} +} + +fn word_at_position(content: &str, position: lsp::Position) -> Option<&str> { + let mut lines = content.lines(); + let line = lines.nth(position.line as usize)?; + let column = position.character as usize; + if column > line.len() { + return None; + } + let start = line[..column] + .rfind(|c: char| !c.is_alphanumeric() && c != '_') + .map(|i| i + 1) + .unwrap_or(0); + let end = line[column..] + .find(|c: char| !c.is_alphanumeric() && c != '_') + .map(|i| i + column) + .unwrap_or(line.len()); + Some(&line[start..end]).filter(|word| !word.is_empty()) +} diff --git a/crates/edit_prediction_context/src/imports.rs b/crates/edit_prediction_context/src/imports.rs deleted file mode 100644 index 70f175159340ddb9a6f26f23db0c1b3c843e7b96..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/imports.rs +++ /dev/null @@ -1,1319 +0,0 @@ -use collections::HashMap; -use language::BufferSnapshot; -use language::ImportsConfig; -use language::Language; -use std::ops::Deref; -use std::path::Path; -use std::sync::Arc; -use std::{borrow::Cow, ops::Range}; -use text::OffsetRangeExt as _; -use util::RangeExt; -use util::paths::PathStyle; - -use crate::Identifier; -use crate::text_similarity::Occurrences; - -// TODO: Write documentation for extension authors. The @import capture must match before or in the -// same pattern as all all captures it contains - -// Future improvements to consider: -// -// * Distinguish absolute vs relative paths in captures. `#include "maths.h"` is relative whereas -// `#include ` is not. -// -// * Provide the name used when importing whole modules (see tests with "named_module" in the name). -// To be useful, will require parsing of identifier qualification. -// -// * Scoping for imports that aren't at the top level -// -// * Only scan a prefix of the file, when possible. This could look like having query matches that -// indicate it reached a declaration that is not allowed in the import section. -// -// * Support directly parsing to occurrences instead of storing namespaces / paths. Types should be -// generic on this, so that tests etc can still use strings. Could do similar in syntax index. -// -// * Distinguish different types of namespaces when known. E.g. "name.type" capture. Once capture -// names are more open-ended like this may make sense to build and cache a jump table (direct -// dispatch from capture index). -// -// * There are a few "Language specific:" comments on behavior that gets applied to all languages. -// Would be cleaner to be conditional on the language or otherwise configured. 
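// A minimal sketch of the capture constraint described above, assuming tree-sitter-rust node
// and field names (`use_declaration`, `argument`, `scoped_identifier`, `path`, `name`); this
// is simplified and is not the actual languages/src/rust/imports.scm query. The `@import`
// capture wraps the whole statement, so every `@namespace` / `@name` capture it contains is
// produced by the same pattern:
//
//     (use_declaration
//       argument: (scoped_identifier
//         path: (_) @namespace
//         name: (identifier) @name)) @import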
- -#[derive(Debug, Clone, Default)] -pub struct Imports { - pub identifier_to_imports: HashMap>, - pub wildcard_modules: Vec, -} - -#[derive(Debug, Clone)] -pub enum Import { - Direct { - module: Module, - }, - Alias { - module: Module, - external_identifier: Identifier, - }, -} - -#[derive(Debug, Clone)] -pub enum Module { - SourceExact(Arc), - SourceFuzzy(Arc), - Namespace(Namespace), -} - -impl Module { - fn empty() -> Self { - Module::Namespace(Namespace::default()) - } - - fn push_range( - &mut self, - range: &ModuleRange, - snapshot: &BufferSnapshot, - language: &Language, - parent_abs_path: Option<&Path>, - ) -> usize { - if range.is_empty() { - return 0; - } - - match range { - ModuleRange::Source(range) => { - if let Self::Namespace(namespace) = self - && namespace.0.is_empty() - { - let path = snapshot.text_for_range(range.clone()).collect::>(); - - let path = if let Some(strip_regex) = - language.config().import_path_strip_regex.as_ref() - { - strip_regex.replace_all(&path, "") - } else { - path - }; - - let path = Path::new(path.as_ref()); - if (path.starts_with(".") || path.starts_with("..")) - && let Some(parent_abs_path) = parent_abs_path - && let Ok(abs_path) = - util::paths::normalize_lexically(&parent_abs_path.join(path)) - { - *self = Self::SourceExact(abs_path.into()); - } else { - *self = Self::SourceFuzzy(path.into()); - }; - } else if matches!(self, Self::SourceExact(_)) - || matches!(self, Self::SourceFuzzy(_)) - { - log::warn!("bug in imports query: encountered multiple @source matches"); - } else { - log::warn!( - "bug in imports query: encountered both @namespace and @source match" - ); - } - } - ModuleRange::Namespace(range) => { - if let Self::Namespace(namespace) = self { - let segment = range_text(snapshot, range); - if language.config().ignored_import_segments.contains(&segment) { - return 0; - } else { - namespace.0.push(segment); - return 1; - } - } else { - log::warn!( - "bug in imports query: encountered both @namespace and @source match" - ); - } - } - } - 0 - } -} - -#[derive(Debug, Clone)] -enum ModuleRange { - Source(Range), - Namespace(Range), -} - -impl Deref for ModuleRange { - type Target = Range; - - fn deref(&self) -> &Self::Target { - match self { - ModuleRange::Source(range) => range, - ModuleRange::Namespace(range) => range, - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct Namespace(pub Vec>); - -impl Namespace { - pub fn occurrences(&self) -> Occurrences { - Occurrences::from_identifiers(&self.0) - } -} - -impl Imports { - pub fn gather(snapshot: &BufferSnapshot, parent_abs_path: Option<&Path>) -> Self { - // Query to match different import patterns - let mut matches = snapshot - .syntax - .matches(0..snapshot.len(), &snapshot.text, |grammar| { - grammar.imports_config().map(|imports| &imports.query) - }); - - let mut detached_nodes: Vec = Vec::new(); - let mut identifier_to_imports = HashMap::default(); - let mut wildcard_modules = Vec::new(); - let mut import_range = None; - - while let Some(query_match) = matches.peek() { - let ImportsConfig { - query: _, - import_ix, - name_ix, - namespace_ix, - source_ix, - list_ix, - wildcard_ix, - alias_ix, - } = matches.grammars()[query_match.grammar_index] - .imports_config() - .unwrap(); - - let mut new_import_range = None; - let mut alias_range = None; - let mut modules = Vec::new(); - let mut content: Option<(Range, ContentKind)> = None; - for capture in query_match.captures { - let capture_range = capture.node.byte_range(); - - if capture.index == *import_ix { - 
new_import_range = Some(capture_range); - } else if Some(capture.index) == *namespace_ix { - modules.push(ModuleRange::Namespace(capture_range)); - } else if Some(capture.index) == *source_ix { - modules.push(ModuleRange::Source(capture_range)); - } else if Some(capture.index) == *alias_ix { - alias_range = Some(capture_range); - } else { - let mut found_content = None; - if Some(capture.index) == *name_ix { - found_content = Some((capture_range, ContentKind::Name)); - } else if Some(capture.index) == *list_ix { - found_content = Some((capture_range, ContentKind::List)); - } else if Some(capture.index) == *wildcard_ix { - found_content = Some((capture_range, ContentKind::Wildcard)); - } - if let Some((found_content_range, found_kind)) = found_content { - if let Some((_, old_kind)) = content { - let point = found_content_range.to_point(snapshot); - log::warn!( - "bug in {} imports query: unexpected multiple captures of {} and {} ({}:{}:{})", - query_match.language.name(), - old_kind.capture_name(), - found_kind.capture_name(), - snapshot - .file() - .map(|p| p.path().display(PathStyle::Posix)) - .unwrap_or_default(), - point.start.row + 1, - point.start.column + 1 - ); - } - content = Some((found_content_range, found_kind)); - } - } - } - - if let Some(new_import_range) = new_import_range { - log::trace!("starting new import {:?}", new_import_range); - Self::gather_from_import_statement( - &detached_nodes, - &snapshot, - parent_abs_path, - &mut identifier_to_imports, - &mut wildcard_modules, - ); - detached_nodes.clear(); - import_range = Some(new_import_range.clone()); - } - - if let Some((content, content_kind)) = content { - if import_range - .as_ref() - .is_some_and(|import_range| import_range.contains_inclusive(&content)) - { - detached_nodes.push(DetachedNode { - modules, - content: content.clone(), - content_kind, - alias: alias_range.unwrap_or(0..0), - language: query_match.language.clone(), - }); - } else { - log::trace!( - "filtered out match not inside import range: {content_kind:?} at {content:?}" - ); - } - } - - matches.advance(); - } - - Self::gather_from_import_statement( - &detached_nodes, - &snapshot, - parent_abs_path, - &mut identifier_to_imports, - &mut wildcard_modules, - ); - - Imports { - identifier_to_imports, - wildcard_modules, - } - } - - fn gather_from_import_statement( - detached_nodes: &[DetachedNode], - snapshot: &BufferSnapshot, - parent_abs_path: Option<&Path>, - identifier_to_imports: &mut HashMap>, - wildcard_modules: &mut Vec, - ) { - let mut trees = Vec::new(); - - for detached_node in detached_nodes { - if let Some(node) = Self::attach_node(detached_node.into(), &mut trees) { - trees.push(node); - } - log::trace!( - "Attached node to tree\n{:#?}\nAttach result:\n{:#?}", - detached_node, - trees - .iter() - .map(|tree| tree.debug(snapshot)) - .collect::>() - ); - } - - for tree in &trees { - let mut module = Module::empty(); - Self::gather_from_tree( - tree, - snapshot, - parent_abs_path, - &mut module, - identifier_to_imports, - wildcard_modules, - ); - } - } - - fn attach_node(mut node: ImportTree, trees: &mut Vec) -> Option { - let mut tree_index = 0; - while tree_index < trees.len() { - let tree = &mut trees[tree_index]; - if !node.content.is_empty() && node.content == tree.content { - // multiple matches can apply to the same name/list/wildcard. This keeps the queries - // simpler by combining info from these matches. 
- if tree.module.is_empty() { - tree.module = node.module; - tree.module_children = node.module_children; - } - if tree.alias.is_empty() { - tree.alias = node.alias; - } - return None; - } else if !node.module.is_empty() && node.module.contains_inclusive(&tree.range()) { - node.module_children.push(trees.remove(tree_index)); - continue; - } else if !node.content.is_empty() && node.content.contains_inclusive(&tree.content) { - node.content_children.push(trees.remove(tree_index)); - continue; - } else if !tree.content.is_empty() && tree.content.contains_inclusive(&node.content) { - if let Some(node) = Self::attach_node(node, &mut tree.content_children) { - tree.content_children.push(node); - } - return None; - } - tree_index += 1; - } - Some(node) - } - - fn gather_from_tree( - tree: &ImportTree, - snapshot: &BufferSnapshot, - parent_abs_path: Option<&Path>, - current_module: &mut Module, - identifier_to_imports: &mut HashMap>, - wildcard_modules: &mut Vec, - ) { - let mut pop_count = 0; - - if tree.module_children.is_empty() { - pop_count += - current_module.push_range(&tree.module, snapshot, &tree.language, parent_abs_path); - } else { - for child in &tree.module_children { - pop_count += Self::extend_namespace_from_tree( - child, - snapshot, - parent_abs_path, - current_module, - ); - } - }; - - if tree.content_children.is_empty() && !tree.content.is_empty() { - match tree.content_kind { - ContentKind::Name | ContentKind::List => { - if tree.alias.is_empty() { - identifier_to_imports - .entry(Identifier { - language_id: tree.language.id(), - name: range_text(snapshot, &tree.content), - }) - .or_default() - .push(Import::Direct { - module: current_module.clone(), - }); - } else { - let alias_name: Arc = range_text(snapshot, &tree.alias); - let external_name = range_text(snapshot, &tree.content); - // Language specific: skip "_" aliases for Rust - if alias_name.as_ref() != "_" { - identifier_to_imports - .entry(Identifier { - language_id: tree.language.id(), - name: alias_name, - }) - .or_default() - .push(Import::Alias { - module: current_module.clone(), - external_identifier: Identifier { - language_id: tree.language.id(), - name: external_name, - }, - }); - } - } - } - ContentKind::Wildcard => wildcard_modules.push(current_module.clone()), - } - } else { - for child in &tree.content_children { - Self::gather_from_tree( - child, - snapshot, - parent_abs_path, - current_module, - identifier_to_imports, - wildcard_modules, - ); - } - } - - if pop_count > 0 { - match current_module { - Module::SourceExact(_) | Module::SourceFuzzy(_) => { - log::warn!( - "bug in imports query: encountered both @namespace and @source match" - ); - } - Module::Namespace(namespace) => { - namespace.0.drain(namespace.0.len() - pop_count..); - } - } - } - } - - fn extend_namespace_from_tree( - tree: &ImportTree, - snapshot: &BufferSnapshot, - parent_abs_path: Option<&Path>, - module: &mut Module, - ) -> usize { - let mut pop_count = 0; - if tree.module_children.is_empty() { - pop_count += module.push_range(&tree.module, snapshot, &tree.language, parent_abs_path); - } else { - for child in &tree.module_children { - pop_count += - Self::extend_namespace_from_tree(child, snapshot, parent_abs_path, module); - } - } - if tree.content_children.is_empty() { - pop_count += module.push_range( - &ModuleRange::Namespace(tree.content.clone()), - snapshot, - &tree.language, - parent_abs_path, - ); - } else { - for child in &tree.content_children { - pop_count += - Self::extend_namespace_from_tree(child, snapshot, 
parent_abs_path, module); - } - } - pop_count - } -} - -fn range_text(snapshot: &BufferSnapshot, range: &Range) -> Arc { - snapshot - .text_for_range(range.clone()) - .collect::>() - .into() -} - -#[derive(Debug)] -struct DetachedNode { - modules: Vec, - content: Range, - content_kind: ContentKind, - alias: Range, - language: Arc, -} - -#[derive(Debug, Clone, Copy)] -enum ContentKind { - Name, - Wildcard, - List, -} - -impl ContentKind { - fn capture_name(&self) -> &'static str { - match self { - ContentKind::Name => "name", - ContentKind::Wildcard => "wildcard", - ContentKind::List => "list", - } - } -} - -#[derive(Debug)] -struct ImportTree { - module: ModuleRange, - /// When non-empty, provides namespace / source info which should be used instead of `module`. - module_children: Vec, - content: Range, - /// When non-empty, provides content which should be used instead of `content`. - content_children: Vec, - content_kind: ContentKind, - alias: Range, - language: Arc, -} - -impl ImportTree { - fn range(&self) -> Range { - self.module.start.min(self.content.start)..self.module.end.max(self.content.end) - } - - #[allow(dead_code)] - fn debug<'a>(&'a self, snapshot: &'a BufferSnapshot) -> ImportTreeDebug<'a> { - ImportTreeDebug { - tree: self, - snapshot, - } - } - - fn from_module_range(module: &ModuleRange, language: Arc) -> Self { - ImportTree { - module: module.clone(), - module_children: Vec::new(), - content: 0..0, - content_children: Vec::new(), - content_kind: ContentKind::Name, - alias: 0..0, - language, - } - } -} - -impl From<&DetachedNode> for ImportTree { - fn from(value: &DetachedNode) -> Self { - let module; - let module_children; - match value.modules.len() { - 0 => { - module = ModuleRange::Namespace(0..0); - module_children = Vec::new(); - } - 1 => { - module = value.modules[0].clone(); - module_children = Vec::new(); - } - _ => { - module = ModuleRange::Namespace( - value.modules.first().unwrap().start..value.modules.last().unwrap().end, - ); - module_children = value - .modules - .iter() - .map(|module| ImportTree::from_module_range(module, value.language.clone())) - .collect(); - } - } - - ImportTree { - module, - module_children, - content: value.content.clone(), - content_children: Vec::new(), - content_kind: value.content_kind, - alias: value.alias.clone(), - language: value.language.clone(), - } - } -} - -struct ImportTreeDebug<'a> { - tree: &'a ImportTree, - snapshot: &'a BufferSnapshot, -} - -impl std::fmt::Debug for ImportTreeDebug<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ImportTree") - .field("module_range", &self.tree.module) - .field("module_text", &range_text(self.snapshot, &self.tree.module)) - .field( - "module_children", - &self - .tree - .module_children - .iter() - .map(|child| child.debug(&self.snapshot)) - .collect::>(), - ) - .field("content_range", &self.tree.content) - .field( - "content_text", - &range_text(self.snapshot, &self.tree.content), - ) - .field( - "content_children", - &self - .tree - .content_children - .iter() - .map(|child| child.debug(&self.snapshot)) - .collect::>(), - ) - .field("content_kind", &self.tree.content_kind) - .field("alias_range", &self.tree.alias) - .field("alias_text", &range_text(self.snapshot, &self.tree.alias)) - .finish() - } -} - -#[cfg(test)] -mod test { - use std::path::PathBuf; - use std::sync::{Arc, LazyLock}; - - use super::*; - use collections::HashSet; - use gpui::{TestAppContext, prelude::*}; - use indoc::indoc; - use language::{ - Buffer, Language, 
LanguageConfig, tree_sitter_python, tree_sitter_rust, - tree_sitter_typescript, - }; - use regex::Regex; - - #[gpui::test] - fn test_rust_simple(cx: &mut TestAppContext) { - check_imports( - &RUST, - "use std::collections::HashMap;", - &[&["std", "collections", "HashMap"]], - cx, - ); - - check_imports( - &RUST, - "pub use std::collections::HashMap;", - &[&["std", "collections", "HashMap"]], - cx, - ); - - check_imports( - &RUST, - "use std::collections::{HashMap, HashSet};", - &[ - &["std", "collections", "HashMap"], - &["std", "collections", "HashSet"], - ], - cx, - ); - } - - #[gpui::test] - fn test_rust_nested(cx: &mut TestAppContext) { - check_imports( - &RUST, - "use std::{any::TypeId, collections::{HashMap, HashSet}};", - &[ - &["std", "any", "TypeId"], - &["std", "collections", "HashMap"], - &["std", "collections", "HashSet"], - ], - cx, - ); - - check_imports( - &RUST, - "use a::b::c::{d::e::F, g::h::I};", - &[ - &["a", "b", "c", "d", "e", "F"], - &["a", "b", "c", "g", "h", "I"], - ], - cx, - ); - } - - #[gpui::test] - fn test_rust_multiple_imports(cx: &mut TestAppContext) { - check_imports( - &RUST, - indoc! {" - use std::collections::HashMap; - use std::any::{TypeId, Any}; - "}, - &[ - &["std", "collections", "HashMap"], - &["std", "any", "TypeId"], - &["std", "any", "Any"], - ], - cx, - ); - - check_imports( - &RUST, - indoc! {" - use std::collections::HashSet; - - fn main() { - let unqualified = HashSet::new(); - let qualified = std::collections::HashMap::new(); - } - - use std::any::TypeId; - "}, - &[ - &["std", "collections", "HashSet"], - &["std", "any", "TypeId"], - ], - cx, - ); - } - - #[gpui::test] - fn test_rust_wildcard(cx: &mut TestAppContext) { - check_imports(&RUST, "use prelude::*;", &[&["prelude", "WILDCARD"]], cx); - - check_imports( - &RUST, - "use zed::prelude::*;", - &[&["zed", "prelude", "WILDCARD"]], - cx, - ); - - check_imports(&RUST, "use prelude::{*};", &[&["prelude", "WILDCARD"]], cx); - - check_imports( - &RUST, - "use prelude::{File, *};", - &[&["prelude", "File"], &["prelude", "WILDCARD"]], - cx, - ); - - check_imports( - &RUST, - "use zed::{App, prelude::*};", - &[&["zed", "App"], &["zed", "prelude", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_rust_alias(cx: &mut TestAppContext) { - check_imports( - &RUST, - "use std::io::Result as IoResult;", - &[&["std", "io", "Result AS IoResult"]], - cx, - ); - } - - #[gpui::test] - fn test_rust_crate_and_super(cx: &mut TestAppContext) { - check_imports(&RUST, "use crate::a::b::c;", &[&["a", "b", "c"]], cx); - check_imports(&RUST, "use super::a::b::c;", &[&["a", "b", "c"]], cx); - // TODO: Consider stripping leading "::". Not done for now because for the text similarity matching usecase this - // is fine. 
- check_imports(&RUST, "use ::a::b::c;", &[&["::a", "b", "c"]], cx); - } - - #[gpui::test] - fn test_typescript_imports(cx: &mut TestAppContext) { - let parent_abs_path = PathBuf::from("/home/user/project"); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import "./maths.js";"#, - &[&["SOURCE /home/user/project/maths", "WILDCARD"]], - cx, - ); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import "../maths.js";"#, - &[&["SOURCE /home/user/maths", "WILDCARD"]], - cx, - ); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import RandomNumberGenerator, { pi as π } from "./maths.js";"#, - &[ - &["SOURCE /home/user/project/maths", "RandomNumberGenerator"], - &["SOURCE /home/user/project/maths", "pi AS π"], - ], - cx, - ); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import { pi, phi, absolute } from "./maths.js";"#, - &[ - &["SOURCE /home/user/project/maths", "pi"], - &["SOURCE /home/user/project/maths", "phi"], - &["SOURCE /home/user/project/maths", "absolute"], - ], - cx, - ); - - // index.js is removed by import_path_strip_regex - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import { pi, phi, absolute } from "./maths/index.js";"#, - &[ - &["SOURCE /home/user/project/maths", "pi"], - &["SOURCE /home/user/project/maths", "phi"], - &["SOURCE /home/user/project/maths", "absolute"], - ], - cx, - ); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import type { SomeThing } from "./some-module.js";"#, - &[&["SOURCE /home/user/project/some-module", "SomeThing"]], - cx, - ); - - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import { type SomeThing, OtherThing } from "./some-module.js";"#, - &[ - &["SOURCE /home/user/project/some-module", "SomeThing"], - &["SOURCE /home/user/project/some-module", "OtherThing"], - ], - cx, - ); - - // index.js is removed by import_path_strip_regex - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import { type SomeThing, OtherThing } from "./some-module/index.js";"#, - &[ - &["SOURCE /home/user/project/some-module", "SomeThing"], - &["SOURCE /home/user/project/some-module", "OtherThing"], - ], - cx, - ); - - // fuzzy paths - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import { type SomeThing, OtherThing } from "@my-app/some-module.js";"#, - &[ - &["SOURCE FUZZY @my-app/some-module", "SomeThing"], - &["SOURCE FUZZY @my-app/some-module", "OtherThing"], - ], - cx, - ); - } - - #[gpui::test] - fn test_typescript_named_module_imports(cx: &mut TestAppContext) { - let parent_abs_path = PathBuf::from("/home/user/project"); - - // TODO: These should provide the name that the module is bound to. - // For now instead these are treated as unqualified wildcard imports. 
- check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import * as math from "./maths.js";"#, - // &[&["/home/user/project/maths.js", "WILDCARD AS math"]], - &[&["SOURCE /home/user/project/maths", "WILDCARD"]], - cx, - ); - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &TYPESCRIPT, - r#"import math = require("./maths");"#, - // &[&["/home/user/project/maths", "WILDCARD AS math"]], - &[&["SOURCE /home/user/project/maths", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_python_imports(cx: &mut TestAppContext) { - check_imports(&PYTHON, "from math import pi", &[&["math", "pi"]], cx); - - check_imports( - &PYTHON, - "from math import pi, sin, cos", - &[&["math", "pi"], &["math", "sin"], &["math", "cos"]], - cx, - ); - - check_imports(&PYTHON, "from math import *", &[&["math", "WILDCARD"]], cx); - - check_imports( - &PYTHON, - "from math import foo.bar.baz", - &[&["math", "foo", "bar", "baz"]], - cx, - ); - - check_imports( - &PYTHON, - "from math import pi as PI", - &[&["math", "pi AS PI"]], - cx, - ); - - check_imports( - &PYTHON, - "from serializers.json import JsonSerializer", - &[&["serializers", "json", "JsonSerializer"]], - cx, - ); - - check_imports( - &PYTHON, - "from custom.serializers import json, xml, yaml", - &[ - &["custom", "serializers", "json"], - &["custom", "serializers", "xml"], - &["custom", "serializers", "yaml"], - ], - cx, - ); - } - - #[gpui::test] - fn test_python_named_module_imports(cx: &mut TestAppContext) { - // TODO: These should provide the name that the module is bound to. - // For now instead these are treated as unqualified wildcard imports. - // - // check_imports(&PYTHON, "import math", &[&["math", "WILDCARD as math"]], cx); - // check_imports(&PYTHON, "import math as maths", &[&["math", "WILDCARD AS maths"]], cx); - // - // Something like: - // - // (import_statement - // name: [ - // (dotted_name - // (identifier)* @namespace - // (identifier) @name.module .) - // (aliased_import - // name: (dotted_name - // ((identifier) ".")* @namespace - // (identifier) @name.module .) - // alias: (identifier) @alias) - // ]) @import - - check_imports(&PYTHON, "import math", &[&["math", "WILDCARD"]], cx); - - check_imports( - &PYTHON, - "import math as maths", - &[&["math", "WILDCARD"]], - cx, - ); - - check_imports(&PYTHON, "import a.b.c", &[&["a", "b", "c", "WILDCARD"]], cx); - - check_imports( - &PYTHON, - "import a.b.c as d", - &[&["a", "b", "c", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_python_package_relative_imports(cx: &mut TestAppContext) { - // TODO: These should provide info about the dir they are relative to, to provide more - // precise resolution. Instead, fuzzy matching is used as usual. - - check_imports(&PYTHON, "from . 
import math", &[&["math"]], cx); - - check_imports(&PYTHON, "from .a import math", &[&["a", "math"]], cx); - - check_imports( - &PYTHON, - "from ..a.b import math", - &[&["a", "b", "math"]], - cx, - ); - - check_imports( - &PYTHON, - "from ..a.b import *", - &[&["a", "b", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_c_imports(cx: &mut TestAppContext) { - let parent_abs_path = PathBuf::from("/home/user/project"); - - // TODO: Distinguish that these are not relative to current path - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &C, - r#"#include "#, - &[&["SOURCE FUZZY math.h", "WILDCARD"]], - cx, - ); - - // TODO: These should be treated as relative, but don't start with ./ or ../ - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &C, - r#"#include "math.h""#, - &[&["SOURCE FUZZY math.h", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_cpp_imports(cx: &mut TestAppContext) { - let parent_abs_path = PathBuf::from("/home/user/project"); - - // TODO: Distinguish that these are not relative to current path - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &CPP, - r#"#include "#, - &[&["SOURCE FUZZY math.h", "WILDCARD"]], - cx, - ); - - // TODO: These should be treated as relative, but don't start with ./ or ../ - check_imports_with_file_abs_path( - Some(&parent_abs_path), - &CPP, - r#"#include "math.h""#, - &[&["SOURCE FUZZY math.h", "WILDCARD"]], - cx, - ); - } - - #[gpui::test] - fn test_go_imports(cx: &mut TestAppContext) { - check_imports( - &GO, - r#"import . "lib/math""#, - &[&["lib/math", "WILDCARD"]], - cx, - ); - - // not included, these are only for side-effects - check_imports(&GO, r#"import _ "lib/math""#, &[], cx); - } - - #[gpui::test] - fn test_go_named_module_imports(cx: &mut TestAppContext) { - // TODO: These should provide the name that the module is bound to. - // For now instead these are treated as unqualified wildcard imports. 
- - check_imports( - &GO, - r#"import "lib/math""#, - &[&["lib/math", "WILDCARD"]], - cx, - ); - check_imports( - &GO, - r#"import m "lib/math""#, - &[&["lib/math", "WILDCARD"]], - cx, - ); - } - - #[track_caller] - fn check_imports( - language: &Arc, - source: &str, - expected: &[&[&str]], - cx: &mut TestAppContext, - ) { - check_imports_with_file_abs_path(None, language, source, expected, cx); - } - - #[track_caller] - fn check_imports_with_file_abs_path( - parent_abs_path: Option<&Path>, - language: &Arc, - source: &str, - expected: &[&[&str]], - cx: &mut TestAppContext, - ) { - let buffer = cx.new(|cx| { - let mut buffer = Buffer::local(source, cx); - buffer.set_language(Some(language.clone()), cx); - buffer - }); - cx.run_until_parked(); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - - let imports = Imports::gather(&snapshot, parent_abs_path); - let mut actual_symbols = imports - .identifier_to_imports - .iter() - .flat_map(|(identifier, imports)| { - imports - .iter() - .map(|import| import.to_identifier_parts(identifier.name.as_ref())) - }) - .chain( - imports - .wildcard_modules - .iter() - .map(|module| module.to_identifier_parts("WILDCARD")), - ) - .collect::>(); - let mut expected_symbols = expected - .iter() - .map(|expected| expected.iter().map(|s| s.to_string()).collect::>()) - .collect::>(); - actual_symbols.sort(); - expected_symbols.sort(); - if actual_symbols != expected_symbols { - let top_layer = snapshot.syntax_layers().next().unwrap(); - panic!( - "Expected imports: {:?}\n\ - Actual imports: {:?}\n\ - Tree:\n{}", - expected_symbols, - actual_symbols, - tree_to_string(&top_layer.node()), - ); - } - } - - fn tree_to_string(node: &tree_sitter::Node) -> String { - let mut cursor = node.walk(); - let mut result = String::new(); - let mut depth = 0; - 'outer: loop { - result.push_str(&" ".repeat(depth)); - if let Some(field_name) = cursor.field_name() { - result.push_str(field_name); - result.push_str(": "); - } - if cursor.node().is_named() { - result.push_str(cursor.node().kind()); - } else { - result.push('"'); - result.push_str(cursor.node().kind()); - result.push('"'); - } - result.push('\n'); - - if cursor.goto_first_child() { - depth += 1; - continue; - } - if cursor.goto_next_sibling() { - continue; - } - while cursor.goto_parent() { - depth -= 1; - if cursor.goto_next_sibling() { - continue 'outer; - } - } - break; - } - result - } - - static RUST: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "Rust".into(), - ignored_import_segments: HashSet::from_iter(["crate".into(), "super".into()]), - import_path_strip_regex: Some(Regex::new("/(lib|mod)\\.rs$").unwrap()), - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_imports_query(include_str!("../../languages/src/rust/imports.scm")) - .unwrap(), - ) - }); - - static TYPESCRIPT: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "TypeScript".into(), - import_path_strip_regex: Some(Regex::new("(?:/index)?\\.[jt]s$").unwrap()), - ..Default::default() - }, - Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), - ) - .with_imports_query(include_str!("../../languages/src/typescript/imports.scm")) - .unwrap(), - ) - }); - - static PYTHON: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "Python".into(), - import_path_strip_regex: Some(Regex::new("/__init__\\.py$").unwrap()), - ..Default::default() - }, - Some(tree_sitter_python::LANGUAGE.into()), - ) 
- .with_imports_query(include_str!("../../languages/src/python/imports.scm")) - .unwrap(), - ) - }); - - // TODO: Ideally should use actual language configurations - static C: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "C".into(), - import_path_strip_regex: Some(Regex::new("^<|>$").unwrap()), - ..Default::default() - }, - Some(tree_sitter_c::LANGUAGE.into()), - ) - .with_imports_query(include_str!("../../languages/src/c/imports.scm")) - .unwrap(), - ) - }); - - static CPP: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "C++".into(), - import_path_strip_regex: Some(Regex::new("^<|>$").unwrap()), - ..Default::default() - }, - Some(tree_sitter_cpp::LANGUAGE.into()), - ) - .with_imports_query(include_str!("../../languages/src/cpp/imports.scm")) - .unwrap(), - ) - }); - - static GO: LazyLock> = LazyLock::new(|| { - Arc::new( - Language::new( - LanguageConfig { - name: "Go".into(), - ..Default::default() - }, - Some(tree_sitter_go::LANGUAGE.into()), - ) - .with_imports_query(include_str!("../../languages/src/go/imports.scm")) - .unwrap(), - ) - }); - - impl Import { - fn to_identifier_parts(&self, identifier: &str) -> Vec { - match self { - Import::Direct { module } => module.to_identifier_parts(identifier), - Import::Alias { - module, - external_identifier: external_name, - } => { - module.to_identifier_parts(&format!("{} AS {}", external_name.name, identifier)) - } - } - } - } - - impl Module { - fn to_identifier_parts(&self, identifier: &str) -> Vec { - match self { - Self::Namespace(namespace) => namespace.to_identifier_parts(identifier), - Self::SourceExact(path) => { - vec![ - format!("SOURCE {}", path.display().to_string().replace("\\", "/")), - identifier.to_string(), - ] - } - Self::SourceFuzzy(path) => { - vec![ - format!( - "SOURCE FUZZY {}", - path.display().to_string().replace("\\", "/") - ), - identifier.to_string(), - ] - } - } - } - } - - impl Namespace { - fn to_identifier_parts(&self, identifier: &str) -> Vec { - self.0 - .iter() - .map(|chunk| chunk.to_string()) - .chain(std::iter::once(identifier.to_string())) - .collect::>() - } - } -} diff --git a/crates/edit_prediction_context/src/outline.rs b/crates/edit_prediction_context/src/outline.rs deleted file mode 100644 index ec02c869dfae4cb861206cb801c285462e734f36..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/outline.rs +++ /dev/null @@ -1,126 +0,0 @@ -use language::{BufferSnapshot, SyntaxMapMatches}; -use std::{cmp::Reverse, ops::Range}; - -use crate::declaration::Identifier; - -// TODO: -// -// * how to handle multiple name captures? 
for now last one wins -// -// * annotation ranges -// -// * new "signature" capture for outline queries -// -// * Check parent behavior of "int x, y = 0" declarations in a test - -pub struct OutlineDeclaration { - pub parent_index: Option, - pub identifier: Identifier, - pub item_range: Range, - pub signature_range: Range, -} - -pub fn declarations_in_buffer(buffer: &BufferSnapshot) -> Vec { - declarations_overlapping_range(0..buffer.len(), buffer) -} - -pub fn declarations_overlapping_range( - range: Range, - buffer: &BufferSnapshot, -) -> Vec { - let mut declarations = OutlineIterator::new(range, buffer).collect::>(); - declarations.sort_unstable_by_key(|item| (item.item_range.start, Reverse(item.item_range.end))); - - let mut parent_stack: Vec<(usize, Range)> = Vec::new(); - for (index, declaration) in declarations.iter_mut().enumerate() { - while let Some((top_parent_index, top_parent_range)) = parent_stack.last() { - if declaration.item_range.start >= top_parent_range.end { - parent_stack.pop(); - } else { - declaration.parent_index = Some(*top_parent_index); - break; - } - } - parent_stack.push((index, declaration.item_range.clone())); - } - declarations -} - -/// Iterates outline items without being ordered w.r.t. nested items and without populating -/// `parent`. -pub struct OutlineIterator<'a> { - buffer: &'a BufferSnapshot, - matches: SyntaxMapMatches<'a>, -} - -impl<'a> OutlineIterator<'a> { - pub fn new(range: Range, buffer: &'a BufferSnapshot) -> Self { - let matches = buffer.syntax.matches(range, &buffer.text, |grammar| { - grammar.outline_config.as_ref().map(|c| &c.query) - }); - - Self { buffer, matches } - } -} - -impl<'a> Iterator for OutlineIterator<'a> { - type Item = OutlineDeclaration; - - fn next(&mut self) -> Option { - while let Some(mat) = self.matches.peek() { - let config = self.matches.grammars()[mat.grammar_index] - .outline_config - .as_ref() - .unwrap(); - - let mut name_range = None; - let mut item_range = None; - let mut signature_start = None; - let mut signature_end = None; - - let mut add_to_signature = |range: Range| { - if signature_start.is_none() { - signature_start = Some(range.start); - } - signature_end = Some(range.end); - }; - - for capture in mat.captures { - let range = capture.node.byte_range(); - if capture.index == config.name_capture_ix { - name_range = Some(range.clone()); - add_to_signature(range); - } else if Some(capture.index) == config.context_capture_ix - || Some(capture.index) == config.extra_context_capture_ix - { - add_to_signature(range); - } else if capture.index == config.item_capture_ix { - item_range = Some(range.clone()); - } - } - - let language_id = mat.language.id(); - self.matches.advance(); - - if let Some(name_range) = name_range - && let Some(item_range) = item_range - && let Some(signature_start) = signature_start - && let Some(signature_end) = signature_end - { - let name = self - .buffer - .text_for_range(name_range) - .collect::() - .into(); - - return Some(OutlineDeclaration { - identifier: Identifier { name, language_id }, - item_range: item_range, - signature_range: signature_start..signature_end, - parent_index: None, - }); - } - } - None - } -} diff --git a/crates/edit_prediction_context/src/reference.rs b/crates/edit_prediction_context/src/reference.rs deleted file mode 100644 index 699adf1d8036802a7a4b9e34ca8e8094e4f97458..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/reference.rs +++ /dev/null @@ -1,173 +0,0 @@ -use collections::HashMap; -use language::BufferSnapshot; 
-use std::ops::Range; -use util::RangeExt; - -use crate::{ - declaration::Identifier, - excerpt::{EditPredictionExcerpt, EditPredictionExcerptText}, -}; - -#[derive(Debug, Clone)] -pub struct Reference { - pub identifier: Identifier, - pub range: Range, - pub region: ReferenceRegion, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum ReferenceRegion { - Breadcrumb, - Nearby, -} - -pub fn references_in_excerpt( - excerpt: &EditPredictionExcerpt, - excerpt_text: &EditPredictionExcerptText, - snapshot: &BufferSnapshot, -) -> HashMap> { - let mut references = references_in_range( - excerpt.range.clone(), - excerpt_text.body.as_str(), - ReferenceRegion::Nearby, - snapshot, - ); - - for ((_, range), text) in excerpt - .parent_declarations - .iter() - .zip(excerpt_text.parent_signatures.iter()) - { - references.extend(references_in_range( - range.clone(), - text.as_str(), - ReferenceRegion::Breadcrumb, - snapshot, - )); - } - - let mut identifier_to_references: HashMap> = HashMap::default(); - for reference in references { - identifier_to_references - .entry(reference.identifier.clone()) - .or_insert_with(Vec::new) - .push(reference); - } - identifier_to_references -} - -/// Finds all nodes which have a "variable" match from the highlights query within the offset range. -pub fn references_in_range( - range: Range, - range_text: &str, - reference_region: ReferenceRegion, - buffer: &BufferSnapshot, -) -> Vec { - let mut matches = buffer - .syntax - .matches(range.clone(), &buffer.text, |grammar| { - grammar - .highlights_config - .as_ref() - .map(|config| &config.query) - }); - - let mut references = Vec::new(); - let mut last_added_range = None; - while let Some(mat) = matches.peek() { - let config = matches.grammars()[mat.grammar_index] - .highlights_config - .as_ref(); - - if let Some(config) = config { - for capture in mat.captures { - if config.identifier_capture_indices.contains(&capture.index) { - let node_range = capture.node.byte_range(); - - // sometimes multiple highlight queries match - this deduplicates them - if Some(node_range.clone()) == last_added_range { - continue; - } - - if !range.contains_inclusive(&node_range) { - continue; - } - - let identifier_text = - &range_text[node_range.start - range.start..node_range.end - range.start]; - - references.push(Reference { - identifier: Identifier { - name: identifier_text.into(), - language_id: mat.language.id(), - }, - range: node_range.clone(), - region: reference_region, - }); - last_added_range = Some(node_range); - } - } - } - - matches.advance(); - } - references -} - -#[cfg(test)] -mod test { - use gpui::{TestAppContext, prelude::*}; - use indoc::indoc; - use language::{BufferSnapshot, Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; - - use crate::reference::{ReferenceRegion, references_in_range}; - - #[gpui::test] - fn test_identifier_node_truncated(cx: &mut TestAppContext) { - let code = indoc! 
{ r#" - fn main() { - add(1, 2); - } - - fn add(a: i32, b: i32) -> i32 { - a + b - } - "# }; - let buffer = create_buffer(code, cx); - - let range = 0..35; - let references = references_in_range( - range.clone(), - &code[range], - ReferenceRegion::Breadcrumb, - &buffer, - ); - assert_eq!(references.len(), 2); - assert_eq!(references[0].identifier.name.as_ref(), "main"); - assert_eq!(references[1].identifier.name.as_ref(), "add"); - } - - fn create_buffer(text: &str, cx: &mut TestAppContext) -> BufferSnapshot { - let buffer = - cx.new(|cx| language::Buffer::local(text, cx).with_language(rust_lang().into(), cx)); - buffer.read_with(cx, |buffer, _| buffer.snapshot()) - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_highlights_query(include_str!("../../languages/src/rust/highlights.scm")) - .unwrap() - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } -} diff --git a/crates/edit_prediction_context/src/syntax_index.rs b/crates/edit_prediction_context/src/syntax_index.rs deleted file mode 100644 index f489a083341b66c7cca3cdad76a9c7ea16fdc959..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/syntax_index.rs +++ /dev/null @@ -1,1069 +0,0 @@ -use anyhow::{Result, anyhow}; -use collections::{HashMap, HashSet}; -use futures::channel::mpsc; -use futures::lock::Mutex; -use futures::{FutureExt as _, StreamExt, future}; -use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity}; -use itertools::Itertools; - -use language::{Buffer, BufferEvent}; -use postage::stream::Stream as _; -use project::buffer_store::{BufferStore, BufferStoreEvent}; -use project::worktree_store::{WorktreeStore, WorktreeStoreEvent}; -use project::{PathChange, Project, ProjectEntryId, ProjectPath}; -use slotmap::SlotMap; -use std::iter; -use std::ops::{DerefMut, Range}; -use std::sync::Arc; -use text::BufferId; -use util::{RangeExt as _, debug_panic, some_or_debug_panic}; - -use crate::CachedDeclarationPath; -use crate::declaration::{ - BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier, -}; -use crate::outline::declarations_in_buffer; - -// TODO -// -// * Also queue / debounce buffer changes. A challenge for this is that use of -// `buffer_declarations_containing_range` assumes that the index is always immediately up to date. -// -// * Add a per language configuration for skipping indexing. -// -// * Handle tsx / ts / js referencing each-other - -// Potential future improvements: -// -// * Prevent indexing of a large file from blocking the queue. -// -// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which -// references are present and their scores. -// -// * Include single-file worktrees / non visible worktrees? E.g. go to definition that resolves to a -// file in a build dependency. Should not be editable in that case - but how to distinguish the case -// where it should be editable? - -// Potential future optimizations: -// -// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind -// of priority system to the background executor could help - it's single threaded for now to avoid -// interfering with other work. -// -// * Parse files directly instead of loading into a Rope. 
-// -// - This would allow the task handling dirty_files to be done entirely on the background executor. -// -// - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries, -// but that can be done by scanning characters in the flat representation. -// -// * Use something similar to slotmap without key versions. -// -// * Concurrent slotmap - -pub struct SyntaxIndex { - state: Arc>, - project: WeakEntity, - initial_file_indexing_done_rx: postage::watch::Receiver, - _file_indexing_task: Option>, -} - -pub struct SyntaxIndexState { - declarations: SlotMap, - identifiers: HashMap>, - files: HashMap, - buffers: HashMap, - dirty_files: HashMap, - dirty_files_tx: mpsc::Sender<()>, -} - -#[derive(Debug, Default)] -struct FileState { - declarations: Vec, -} - -#[derive(Default)] -struct BufferState { - declarations: Vec, - task: Option>, -} - -impl SyntaxIndex { - pub fn new( - project: &Entity, - file_indexing_parallelism: usize, - cx: &mut Context, - ) -> Self { - assert!(file_indexing_parallelism > 0); - let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1); - let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) = - postage::watch::channel(); - - let initial_state = SyntaxIndexState { - declarations: SlotMap::default(), - identifiers: HashMap::default(), - files: HashMap::default(), - buffers: HashMap::default(), - dirty_files: HashMap::default(), - dirty_files_tx, - }; - let mut this = Self { - project: project.downgrade(), - state: Arc::new(Mutex::new(initial_state)), - initial_file_indexing_done_rx, - _file_indexing_task: None, - }; - - let worktree_store = project.read(cx).worktree_store(); - let initial_worktree_snapshots = worktree_store - .read(cx) - .worktrees() - .map(|w| w.read(cx).snapshot()) - .collect::>(); - this._file_indexing_task = Some(cx.spawn(async move |this, cx| { - let snapshots_file_count = initial_worktree_snapshots - .iter() - .map(|worktree| worktree.file_count()) - .sum::(); - if snapshots_file_count > 0 { - let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism); - let chunk_count = snapshots_file_count.div_ceil(chunk_size); - let file_chunks = initial_worktree_snapshots - .iter() - .flat_map(|worktree| { - let worktree_id = worktree.id(); - worktree.files(false, 0).map(move |entry| { - ( - entry.id, - ProjectPath { - worktree_id, - path: entry.path.clone(), - }, - ) - }) - }) - .chunks(chunk_size); - - let mut tasks = Vec::with_capacity(chunk_count); - for chunk in file_chunks.into_iter() { - tasks.push(Self::update_dirty_files( - &this, - chunk.into_iter().collect(), - cx.clone(), - )); - } - futures::future::join_all(tasks).await; - log::info!("Finished initial file indexing"); - } - - *initial_file_indexing_done_tx.borrow_mut() = true; - - let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else { - return; - }; - while dirty_files_rx.next().await.is_some() { - let Some(state) = state.upgrade() else { - return; - }; - let mut state = state.lock().await; - let was_underused = state.dirty_files.capacity() > 255 - && state.dirty_files.len() * 8 < state.dirty_files.capacity(); - let dirty_files = state.dirty_files.drain().collect::>(); - if was_underused { - state.dirty_files.shrink_to_fit(); - } - drop(state); - if dirty_files.is_empty() { - continue; - } - - let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism); - let chunk_count = dirty_files.len().div_ceil(chunk_size); - let mut tasks = Vec::with_capacity(chunk_count); - let chunks = 
dirty_files.into_iter().chunks(chunk_size); - for chunk in chunks.into_iter() { - tasks.push(Self::update_dirty_files( - &this, - chunk.into_iter().collect(), - cx.clone(), - )); - } - futures::future::join_all(tasks).await; - } - })); - - cx.subscribe(&worktree_store, Self::handle_worktree_store_event) - .detach(); - - let buffer_store = project.read(cx).buffer_store().clone(); - for buffer in buffer_store.read(cx).buffers().collect::>() { - this.register_buffer(&buffer, cx); - } - cx.subscribe(&buffer_store, Self::handle_buffer_store_event) - .detach(); - - this - } - - async fn update_dirty_files( - this: &WeakEntity, - dirty_files: Vec<(ProjectEntryId, ProjectPath)>, - mut cx: AsyncApp, - ) { - for (entry_id, project_path) in dirty_files { - let Ok(task) = this.update(&mut cx, |this, cx| { - this.update_file(entry_id, project_path, cx) - }) else { - return; - }; - task.await; - } - } - - pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task> { - if *self.initial_file_indexing_done_rx.borrow() { - Task::ready(Ok(())) - } else { - let mut rx = self.initial_file_indexing_done_rx.clone(); - cx.background_spawn(async move { - loop { - match rx.recv().await { - Some(true) => return Ok(()), - Some(false) => {} - None => { - return Err(anyhow!( - "SyntaxIndex dropped while waiting for initial file indexing" - )); - } - } - } - }) - } - } - - pub fn indexed_file_paths(&self, cx: &App) -> Task> { - let state = self.state.clone(); - let project = self.project.clone(); - - cx.spawn(async move |cx| { - let state = state.lock().await; - let Some(project) = project.upgrade() else { - return vec![]; - }; - project - .read_with(cx, |project, cx| { - state - .files - .keys() - .filter_map(|entry_id| project.path_for_entry(*entry_id, cx)) - .collect() - }) - .unwrap_or_default() - }) - } - - fn handle_worktree_store_event( - &mut self, - _worktree_store: Entity, - event: &WorktreeStoreEvent, - cx: &mut Context, - ) { - use WorktreeStoreEvent::*; - match event { - WorktreeUpdatedEntries(worktree_id, updated_entries_set) => { - let state = Arc::downgrade(&self.state); - let worktree_id = *worktree_id; - let updated_entries_set = updated_entries_set.clone(); - cx.background_spawn(async move { - let Some(state) = state.upgrade() else { return }; - let mut state = state.lock().await; - for (path, entry_id, path_change) in updated_entries_set.iter() { - if let PathChange::Removed = path_change { - state.files.remove(entry_id); - state.dirty_files.remove(entry_id); - } else { - let project_path = ProjectPath { - worktree_id, - path: path.clone(), - }; - state.dirty_files.insert(*entry_id, project_path); - } - } - match state.dirty_files_tx.try_send(()) { - Err(err) if err.is_disconnected() => { - log::error!("bug: syntax indexing queue is disconnected"); - } - _ => {} - } - }) - .detach(); - } - WorktreeDeletedEntry(_worktree_id, project_entry_id) => { - let project_entry_id = *project_entry_id; - self.with_state(cx, move |state| { - state.files.remove(&project_entry_id); - }) - } - _ => {} - } - } - - fn handle_buffer_store_event( - &mut self, - _buffer_store: Entity, - event: &BufferStoreEvent, - cx: &mut Context, - ) { - use BufferStoreEvent::*; - match event { - BufferAdded(buffer) => self.register_buffer(buffer, cx), - BufferOpened { .. } - | BufferChangedFilePath { .. } - | BufferDropped { .. } - | SharedBufferClosed { .. 
} => {} - } - } - - pub fn state(&self) -> &Arc> { - &self.state - } - - fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) { - if let Some(mut state) = self.state.try_lock() { - f(&mut state); - return; - } - let state = Arc::downgrade(&self.state); - cx.background_spawn(async move { - let Some(state) = state.upgrade() else { - return; - }; - let mut state = state.lock().await; - f(&mut state) - }) - .detach(); - } - - fn register_buffer(&self, buffer: &Entity, cx: &mut Context) { - let buffer_id = buffer.read(cx).remote_id(); - cx.observe_release(buffer, move |this, _buffer, cx| { - this.with_state(cx, move |state| { - if let Some(buffer_state) = state.buffers.remove(&buffer_id) { - SyntaxIndexState::remove_buffer_declarations( - &buffer_state.declarations, - &mut state.declarations, - &mut state.identifiers, - ); - } - }) - }) - .detach(); - cx.subscribe(buffer, Self::handle_buffer_event).detach(); - - self.update_buffer(buffer.clone(), cx); - } - - fn handle_buffer_event( - &mut self, - buffer: Entity, - event: &BufferEvent, - cx: &mut Context, - ) { - match event { - BufferEvent::Edited | - // paths are cached and so should be updated - BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx), - _ => {} - } - } - - fn update_buffer(&self, buffer_entity: Entity, cx: &mut Context) { - let buffer = buffer_entity.read(cx); - if buffer.language().is_none() { - return; - } - - let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file()) - .and_then(|f| { - let project_entry_id = f.project_entry_id()?; - let cached_path = CachedDeclarationPath::new( - f.worktree.read(cx).abs_path(), - &f.path, - buffer.language(), - ); - Some((project_entry_id, cached_path)) - }) - else { - return; - }; - let buffer_id = buffer.remote_id(); - - let mut parse_status = buffer.parse_status(); - let snapshot_task = cx.spawn({ - let weak_buffer = buffer_entity.downgrade(); - async move |_, cx| { - while *parse_status.borrow() != language::ParseStatus::Idle { - parse_status.changed().await?; - } - weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot()) - } - }); - - let state = Arc::downgrade(&self.state); - let task = cx.background_spawn(async move { - // TODO: How to handle errors? 
- let Ok(snapshot) = snapshot_task.await else { - return; - }; - let rope = snapshot.text.as_rope(); - - let declarations = declarations_in_buffer(&snapshot) - .into_iter() - .map(|item| { - ( - item.parent_index, - BufferDeclaration::from_outline(item, &rope), - ) - }) - .collect::>(); - - let Some(state) = state.upgrade() else { - return; - }; - let mut state = state.lock().await; - let state = state.deref_mut(); - - let buffer_state = state - .buffers - .entry(buffer_id) - .or_insert_with(Default::default); - - SyntaxIndexState::remove_buffer_declarations( - &buffer_state.declarations, - &mut state.declarations, - &mut state.identifiers, - ); - - let mut new_ids = Vec::with_capacity(declarations.len()); - state.declarations.reserve(declarations.len()); - for (parent_index, mut declaration) in declarations { - declaration.parent = - parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied())); - - let identifier = declaration.identifier.clone(); - let declaration_id = state.declarations.insert(Declaration::Buffer { - rope: rope.clone(), - buffer_id, - declaration, - project_entry_id, - cached_path: cached_path.clone(), - }); - new_ids.push(declaration_id); - - state - .identifiers - .entry(identifier) - .or_default() - .insert(declaration_id); - } - - buffer_state.declarations = new_ids; - }); - - self.with_state(cx, move |state| { - state - .buffers - .entry(buffer_id) - .or_insert_with(Default::default) - .task = Some(task) - }); - } - - fn update_file( - &mut self, - entry_id: ProjectEntryId, - project_path: ProjectPath, - cx: &mut Context, - ) -> Task<()> { - let Some(project) = self.project.upgrade() else { - return Task::ready(()); - }; - let project = project.read(cx); - - let language_registry = project.languages(); - let Some(available_language) = - language_registry.language_for_file_path(project_path.path.as_std_path()) - else { - return Task::ready(()); - }; - let language = if let Some(Ok(Ok(language))) = language_registry - .load_language(&available_language) - .now_or_never() - { - if language - .grammar() - .is_none_or(|grammar| grammar.outline_config.is_none()) - { - return Task::ready(()); - } - future::Either::Left(async { Ok(language) }) - } else { - let language_registry = language_registry.clone(); - future::Either::Right(async move { - anyhow::Ok( - language_registry - .load_language(&available_language) - .await??, - ) - }) - }; - - let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else { - return Task::ready(()); - }; - - let snapshot_task = worktree.update(cx, |worktree, cx| { - let load_task = worktree.load_file(&project_path.path, cx); - let worktree_abs_path = worktree.abs_path(); - cx.spawn(async move |_this, cx| { - let loaded_file = load_task.await?; - let language = language.await?; - - let buffer = cx.new(|cx| { - let mut buffer = Buffer::local(loaded_file.text, cx); - buffer.set_language(Some(language.clone()), cx); - buffer - })?; - - let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?; - while *parse_status.borrow() != language::ParseStatus::Idle { - parse_status.changed().await?; - } - - let cached_path = CachedDeclarationPath::new( - worktree_abs_path, - &project_path.path, - Some(&language), - ); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - - anyhow::Ok((snapshot, cached_path)) - }) - }); - - let state = Arc::downgrade(&self.state); - cx.background_spawn(async move { - // TODO: How to handle errors? 
- let Ok((snapshot, cached_path)) = snapshot_task.await else { - return; - }; - let rope = snapshot.as_rope(); - let declarations = declarations_in_buffer(&snapshot) - .into_iter() - .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope))) - .collect::>(); - - let Some(state) = state.upgrade() else { - return; - }; - let mut state = state.lock().await; - let state = state.deref_mut(); - - let file_state = state.files.entry(entry_id).or_insert_with(Default::default); - for old_declaration_id in &file_state.declarations { - let Some(declaration) = state.declarations.remove(*old_declaration_id) else { - debug_panic!("declaration not found"); - continue; - }; - if let Some(identifier_declarations) = - state.identifiers.get_mut(declaration.identifier()) - { - identifier_declarations.remove(old_declaration_id); - } - } - - let mut new_ids = Vec::with_capacity(declarations.len()); - state.declarations.reserve(declarations.len()); - for (parent_index, mut declaration) in declarations { - declaration.parent = - parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied())); - - let identifier = declaration.identifier.clone(); - let declaration_id = state.declarations.insert(Declaration::File { - project_entry_id: entry_id, - declaration, - cached_path: cached_path.clone(), - }); - new_ids.push(declaration_id); - - state - .identifiers - .entry(identifier) - .or_default() - .insert(declaration_id); - } - file_state.declarations = new_ids; - }) - } -} - -impl SyntaxIndexState { - pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> { - self.declarations.get(id) - } - - /// Returns declarations for the identifier. If the limit is exceeded, returns an empty vector. - /// - /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded. - pub fn declarations_for_identifier( - &self, - identifier: &Identifier, - ) -> Vec<(DeclarationId, &Declaration)> { - // make sure to not have a large stack allocation - assert!(N < 32); - - let Some(declaration_ids) = self.identifiers.get(&identifier) else { - return vec![]; - }; - - let mut result = Vec::with_capacity(N); - let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new(); - let mut file_declarations = Vec::new(); - - for declaration_id in declaration_ids { - let declaration = self.declarations.get(*declaration_id); - let Some(declaration) = some_or_debug_panic(declaration) else { - continue; - }; - match declaration { - Declaration::Buffer { - project_entry_id, .. - } => { - included_buffer_entry_ids.push(*project_entry_id); - result.push((*declaration_id, declaration)); - if result.len() == N { - return Vec::new(); - } - } - Declaration::File { - project_entry_id, .. - } => { - if !included_buffer_entry_ids.contains(&project_entry_id) { - file_declarations.push((*declaration_id, declaration)); - } - } - } - } - - for (declaration_id, declaration) in file_declarations { - match declaration { - Declaration::File { - project_entry_id, .. - } => { - if !included_buffer_entry_ids.contains(&project_entry_id) { - result.push((declaration_id, declaration)); - - if result.len() == N { - return Vec::new(); - } - } - } - Declaration::Buffer { .. 
} => {} - } - } - - result - } - - pub fn buffer_declarations_containing_range( - &self, - buffer_id: BufferId, - range: Range, - ) -> impl Iterator { - let Some(buffer_state) = self.buffers.get(&buffer_id) else { - return itertools::Either::Left(iter::empty()); - }; - - let iter = buffer_state - .declarations - .iter() - .filter_map(move |declaration_id| { - let Some(declaration) = self - .declarations - .get(*declaration_id) - .and_then(|d| d.as_buffer()) - else { - log::error!("bug: missing buffer outline declaration"); - return None; - }; - if declaration.item_range.contains_inclusive(&range) { - return Some((*declaration_id, declaration)); - } - return None; - }); - itertools::Either::Right(iter) - } - - pub fn file_declaration_count(&self, declaration: &Declaration) -> usize { - match declaration { - Declaration::File { - project_entry_id, .. - } => self - .files - .get(project_entry_id) - .map(|file_state| file_state.declarations.len()) - .unwrap_or_default(), - Declaration::Buffer { buffer_id, .. } => self - .buffers - .get(buffer_id) - .map(|buffer_state| buffer_state.declarations.len()) - .unwrap_or_default(), - } - } - - fn remove_buffer_declarations( - old_declaration_ids: &[DeclarationId], - declarations: &mut SlotMap, - identifiers: &mut HashMap>, - ) { - for old_declaration_id in old_declaration_ids { - let Some(declaration) = declarations.remove(*old_declaration_id) else { - debug_panic!("declaration not found"); - continue; - }; - if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) { - identifier_declarations.remove(old_declaration_id); - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::sync::Arc; - - use gpui::TestAppContext; - use indoc::indoc; - use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust}; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use text::OffsetRangeExt as _; - use util::{path, rel_path::rel_path}; - - use crate::syntax_index::SyntaxIndex; - - #[gpui::test] - async fn test_unopen_indexed_files(cx: &mut TestAppContext) { - let (project, index, rust_lang_id) = init_test(cx).await; - let main = Identifier { - name: "main".into(), - language_id: rust_lang_id, - }; - - let index_state = index.read_with(cx, |index, _cx| index.state().clone()); - let index_state = index_state.lock().await; - cx.update(|cx| { - let decls = index_state.declarations_for_identifier::<8>(&main); - assert_eq!(decls.len(), 2); - - let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx); - assert_eq!(decl.identifier, main); - assert_eq!(decl.item_range, 0..98); - - let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx); - assert_eq!(decl.identifier, main.clone()); - assert_eq!(decl.item_range, 32..280); - }); - } - - #[gpui::test] - async fn test_parents_in_file(cx: &mut TestAppContext) { - let (project, index, rust_lang_id) = init_test(cx).await; - let test_process_data = Identifier { - name: "test_process_data".into(), - language_id: rust_lang_id, - }; - - let index_state = index.read_with(cx, |index, _cx| index.state().clone()); - let index_state = index_state.lock().await; - cx.update(|cx| { - let decls = index_state.declarations_for_identifier::<8>(&test_process_data); - assert_eq!(decls.len(), 1); - - let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx); - assert_eq!(decl.identifier, test_process_data); - - let parent_id = decl.parent.unwrap(); - let parent = index_state.declaration(parent_id).unwrap(); - let parent_decl = 
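The lookup exercised by these tests (`declarations_for_identifier`, defined just above) has two behaviors worth spelling out: open-buffer declarations shadow on-disk declarations for the same project entry, and hitting the `N` limit returns an empty vector rather than a truncated list. A simplified sketch of that selection logic, with plain `u32` entry ids standing in for `ProjectEntryId`:

```rust
#[derive(Debug)]
enum Declaration {
    Buffer { entry: u32, name: &'static str },
    File { entry: u32, name: &'static str },
}

// Prefer buffer declarations, skip file declarations whose entry is already covered
// by a buffer, and give up (empty vec) as soon as the result count reaches `limit`,
// mirroring the early-out in the deleted code above.
fn declarations_for_identifier(all: &[Declaration], limit: usize) -> Vec<&Declaration> {
    let mut result = Vec::new();
    let mut buffer_entries = Vec::new();
    for decl in all {
        if let Declaration::Buffer { entry, .. } = decl {
            buffer_entries.push(*entry);
            result.push(decl);
            if result.len() == limit {
                return Vec::new();
            }
        }
    }
    for decl in all {
        if let Declaration::File { entry, .. } = decl {
            if !buffer_entries.contains(entry) {
                result.push(decl);
                if result.len() == limit {
                    return Vec::new();
                }
            }
        }
    }
    result
}

fn main() {
    let decls = [
        Declaration::File { entry: 1, name: "main" }, // shadowed by the buffer below
        Declaration::Buffer { entry: 1, name: "main" },
        Declaration::File { entry: 2, name: "main" },
    ];
    // One buffer declaration plus the unshadowed file declaration.
    assert_eq!(declarations_for_identifier(&decls, 8).len(), 2);
    // Reaching the limit yields an empty result instead of a truncated one.
    assert!(declarations_for_identifier(&decls, 1).is_empty());
}
```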
expect_file_decl("c.rs", &parent, &project, cx); - assert_eq!( - parent_decl.identifier, - Identifier { - name: "tests".into(), - language_id: rust_lang_id - } - ); - assert_eq!(parent_decl.parent, None); - }); - } - - #[gpui::test] - async fn test_parents_in_buffer(cx: &mut TestAppContext) { - let (project, index, rust_lang_id) = init_test(cx).await; - let test_process_data = Identifier { - name: "test_process_data".into(), - language_id: rust_lang_id, - }; - - let buffer = project - .update(cx, |project, cx| { - let project_path = project.find_project_path("c.rs", cx).unwrap(); - project.open_buffer(project_path, cx) - }) - .await - .unwrap(); - - cx.run_until_parked(); - - let index_state = index.read_with(cx, |index, _cx| index.state().clone()); - let index_state = index_state.lock().await; - cx.update(|cx| { - let decls = index_state.declarations_for_identifier::<8>(&test_process_data); - assert_eq!(decls.len(), 1); - - let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx); - assert_eq!(decl.identifier, test_process_data); - - let parent_id = decl.parent.unwrap(); - let parent = index_state.declaration(parent_id).unwrap(); - let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx); - assert_eq!( - parent_decl.identifier, - Identifier { - name: "tests".into(), - language_id: rust_lang_id - } - ); - assert_eq!(parent_decl.parent, None); - }); - - drop(buffer); - } - - #[gpui::test] - async fn test_declarations_limit(cx: &mut TestAppContext) { - let (_, index, rust_lang_id) = init_test(cx).await; - - let index_state = index.read_with(cx, |index, _cx| index.state().clone()); - let index_state = index_state.lock().await; - let decls = index_state.declarations_for_identifier::<1>(&Identifier { - name: "main".into(), - language_id: rust_lang_id, - }); - assert_eq!(decls.len(), 0); - } - - #[gpui::test] - async fn test_buffer_shadow(cx: &mut TestAppContext) { - let (project, index, rust_lang_id) = init_test(cx).await; - - let main = Identifier { - name: "main".into(), - language_id: rust_lang_id, - }; - - let buffer = project - .update(cx, |project, cx| { - let project_path = project.find_project_path("c.rs", cx).unwrap(); - project.open_buffer(project_path, cx) - }) - .await - .unwrap(); - - cx.run_until_parked(); - - let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone()); - { - let index_state = index_state_arc.lock().await; - - cx.update(|cx| { - let decls = index_state.declarations_for_identifier::<8>(&main); - assert_eq!(decls.len(), 2); - let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx); - assert_eq!(decl.identifier, main); - assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280); - - expect_file_decl("a.rs", &decls[1].1, &project, cx); - }); - } - - // Drop the buffer and wait for release - cx.update(|_| { - drop(buffer); - }); - cx.run_until_parked(); - - let index_state = index_state_arc.lock().await; - - cx.update(|cx| { - let decls = index_state.declarations_for_identifier::<8>(&main); - assert_eq!(decls.len(), 2); - expect_file_decl("a.rs", &decls[0].1, &project, cx); - expect_file_decl("c.rs", &decls[1].1, &project, cx); - }); - } - - fn expect_buffer_decl<'a>( - path: &str, - declaration: &'a Declaration, - project: &Entity, - cx: &App, - ) -> &'a BufferDeclaration { - if let Declaration::Buffer { - declaration, - project_entry_id, - .. 
- } = declaration - { - let project_path = project - .read(cx) - .path_for_entry(*project_entry_id, cx) - .unwrap(); - assert_eq!(project_path.path.as_ref(), rel_path(path),); - declaration - } else { - panic!("Expected a buffer declaration, found {:?}", declaration); - } - } - - fn expect_file_decl<'a>( - path: &str, - declaration: &'a Declaration, - project: &Entity, - cx: &App, - ) -> &'a FileDeclaration { - if let Declaration::File { - declaration, - project_entry_id: file, - .. - } = declaration - { - assert_eq!( - project - .read(cx) - .path_for_entry(*file, cx) - .unwrap() - .path - .as_ref(), - rel_path(path), - ); - declaration - } else { - panic!("Expected a file declaration, found {:?}", declaration); - } - } - - async fn init_test( - cx: &mut TestAppContext, - ) -> (Entity, Entity, LanguageId) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "a.rs": indoc! {r#" - fn main() { - let x = 1; - let y = 2; - let z = add(x, y); - println!("Result: {}", z); - } - - fn add(a: i32, b: i32) -> i32 { - a + b - } - "#}, - "b.rs": indoc! {" - pub struct Config { - pub name: String, - pub value: i32, - } - - impl Config { - pub fn new(name: String, value: i32) -> Self { - Config { name, value } - } - } - "}, - "c.rs": indoc! {r#" - use std::collections::HashMap; - - fn main() { - let args: Vec = std::env::args().collect(); - let data: Vec = args[1..] - .iter() - .filter_map(|s| s.parse().ok()) - .collect(); - let result = process_data(data); - println!("{:?}", result); - } - - fn process_data(data: Vec) -> HashMap { - let mut counts = HashMap::new(); - for value in data { - *counts.entry(value).or_insert(0) += 1; - } - counts - } - - #[cfg(test)] - mod tests { - use super::*; - - #[test] - fn test_process_data() { - let data = vec![1, 2, 2, 3]; - let result = process_data(data); - assert_eq!(result.get(&2), Some(&2)); - } - } - "#} - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let lang = rust_lang(); - let lang_id = lang.id(); - language_registry.add(Arc::new(lang)); - - let file_indexing_parallelism = 2; - let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx)); - cx.run_until_parked(); - - (project, index, lang_id) - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } -} diff --git a/crates/edit_prediction_context/src/text_similarity.rs b/crates/edit_prediction_context/src/text_similarity.rs deleted file mode 100644 index 308a9570206084fc223c72f2e1c49109ea157714..0000000000000000000000000000000000000000 --- a/crates/edit_prediction_context/src/text_similarity.rs +++ /dev/null @@ -1,314 +0,0 @@ -use hashbrown::HashTable; -use regex::Regex; -use std::{ - borrow::Cow, - hash::{Hash, Hasher as _}, - path::Path, - sync::LazyLock, -}; -use util::rel_path::RelPath; - -use crate::reference::Reference; - -// TODO: Consider implementing sliding window similarity matching like -// 
https://github.com/sourcegraph/cody-public-snapshot/blob/8e20ac6c1460c08b0db581c0204658112a246eda/vscode/src/completions/context/retrievers/jaccard-similarity/bestJaccardMatch.ts -// -// That implementation could actually be more efficient - no need to track words in the window that -// are not in the query. - -// TODO: Consider a flat sorted Vec<(String, usize)> representation. Intersection can just walk the -// two in parallel. - -static IDENTIFIER_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"\b\w+\b").unwrap()); - -/// Multiset of text occurrences for text similarity that only stores hashes and counts. -#[derive(Debug, Default)] -pub struct Occurrences { - table: HashTable, - total_count: usize, -} - -#[derive(Debug)] -struct OccurrenceEntry { - hash: u64, - count: usize, -} - -impl Occurrences { - pub fn within_string(text: &str) -> Self { - Self::from_identifiers(IDENTIFIER_REGEX.find_iter(text).map(|mat| mat.as_str())) - } - - #[allow(dead_code)] - pub fn within_references(references: &[Reference]) -> Self { - Self::from_identifiers( - references - .iter() - .map(|reference| reference.identifier.name.as_ref()), - ) - } - - pub fn from_identifiers(identifiers: impl IntoIterator>) -> Self { - let mut this = Self::default(); - // TODO: Score matches that match case higher? - // - // TODO: Also include unsplit identifier? - for identifier in identifiers { - for identifier_part in split_identifier(identifier.as_ref()) { - this.add_hash(fx_hash(&identifier_part.to_lowercase())); - } - } - this - } - - pub fn from_worktree_path(worktree_name: Option>, rel_path: &RelPath) -> Self { - if let Some(worktree_name) = worktree_name { - Self::from_identifiers( - std::iter::once(worktree_name) - .chain(iter_path_without_extension(rel_path.as_std_path())), - ) - } else { - Self::from_path(rel_path.as_std_path()) - } - } - - pub fn from_path(path: &Path) -> Self { - Self::from_identifiers(iter_path_without_extension(path)) - } - - fn add_hash(&mut self, hash: u64) { - self.table - .entry( - hash, - |entry: &OccurrenceEntry| entry.hash == hash, - |entry| entry.hash, - ) - .and_modify(|entry| entry.count += 1) - .or_insert(OccurrenceEntry { hash, count: 1 }); - self.total_count += 1; - } - - fn contains_hash(&self, hash: u64) -> bool { - self.get_count(hash) != 0 - } - - fn get_count(&self, hash: u64) -> usize { - self.table - .find(hash, |entry| entry.hash == hash) - .map(|entry| entry.count) - .unwrap_or(0) - } -} - -fn iter_path_without_extension(path: &Path) -> impl Iterator> { - let last_component: Option> = path.file_stem().map(|stem| stem.to_string_lossy()); - let mut path_components = path.components(); - path_components.next_back(); - path_components - .map(|component| component.as_os_str().to_string_lossy()) - .chain(last_component) -} - -pub fn fx_hash(data: &T) -> u64 { - let mut hasher = collections::FxHasher::default(); - data.hash(&mut hasher); - hasher.finish() -} - -// Splits camelcase / snakecase / kebabcase / pascalcase -// -// TODO: Make this more efficient / elegant. 
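`Occurrences` above is just a multiset of hashed identifier parts, and the similarity functions defined further down reduce to counting over those hashes. A compact, std-only restatement of the idea, using `DefaultHasher` instead of the crate's `FxHasher` and a naive alphanumeric split (which handles snake_case but, unlike `split_identifier` below, ignores camelCase):

```rust
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

fn hash_str(s: &str) -> u64 {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    s.hash(&mut hasher);
    hasher.finish()
}

// hash of a lowercased identifier part -> number of occurrences
fn occurrences(text: &str) -> HashMap<u64, usize> {
    let mut counts = HashMap::new();
    for part in text.split(|c: char| !c.is_alphanumeric()).filter(|p| !p.is_empty()) {
        *counts.entry(hash_str(&part.to_lowercase())).or_insert(0) += 1;
    }
    counts
}

// Jaccard similarity over *distinct* parts: |A ∩ B| / |A ∪ B|.
fn jaccard(a: &HashMap<u64, usize>, b: &HashMap<u64, usize>) -> f32 {
    let (small, large) = if a.len() <= b.len() { (a, b) } else { (b, a) };
    let intersection = small.keys().filter(|k| large.contains_key(k)).count();
    let union = a.len() + b.len() - intersection;
    if union == 0 { 0.0 } else { intersection as f32 / union as f32 }
}

fn main() {
    let a = occurrences("let outline_items = query_outline_items(&language, &tree);");
    let b = occurrences("pub fn query_outline_items(language: &Language, tree: &Tree)");
    let score = jaccard(&a, &b);
    assert!(score > 0.0 && score <= 1.0);
    println!("jaccard = {score}");
}
```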
-fn split_identifier(identifier: &str) -> Vec<&str> { - let mut parts = Vec::new(); - let mut start = 0; - let chars: Vec = identifier.chars().collect(); - - if chars.is_empty() { - return parts; - } - - let mut i = 0; - while i < chars.len() { - let ch = chars[i]; - - // Handle explicit delimiters (underscore and hyphen) - if ch == '_' || ch == '-' { - if i > start { - parts.push(&identifier[start..i]); - } - start = i + 1; - i += 1; - continue; - } - - // Handle camelCase and PascalCase transitions - if i > 0 && i < chars.len() { - let prev_char = chars[i - 1]; - - // Transition from lowercase/digit to uppercase - if (prev_char.is_lowercase() || prev_char.is_ascii_digit()) && ch.is_uppercase() { - parts.push(&identifier[start..i]); - start = i; - } - // Handle sequences like "XMLParser" -> ["XML", "Parser"] - else if i + 1 < chars.len() - && ch.is_uppercase() - && chars[i + 1].is_lowercase() - && prev_char.is_uppercase() - { - parts.push(&identifier[start..i]); - start = i; - } - } - - i += 1; - } - - // Add the last part if there's any remaining - if start < identifier.len() { - parts.push(&identifier[start..]); - } - - // Filter out empty strings - parts.into_iter().filter(|s| !s.is_empty()).collect() -} - -pub fn jaccard_similarity<'a>(mut set_a: &'a Occurrences, mut set_b: &'a Occurrences) -> f32 { - if set_a.table.len() > set_b.table.len() { - std::mem::swap(&mut set_a, &mut set_b); - } - let intersection = set_a - .table - .iter() - .filter(|entry| set_b.contains_hash(entry.hash)) - .count(); - let union = set_a.table.len() + set_b.table.len() - intersection; - intersection as f32 / union as f32 -} - -// TODO -#[allow(dead_code)] -pub fn overlap_coefficient<'a>(mut set_a: &'a Occurrences, mut set_b: &'a Occurrences) -> f32 { - if set_a.table.len() > set_b.table.len() { - std::mem::swap(&mut set_a, &mut set_b); - } - let intersection = set_a - .table - .iter() - .filter(|entry| set_b.contains_hash(entry.hash)) - .count(); - intersection as f32 / set_a.table.len() as f32 -} - -// TODO -#[allow(dead_code)] -pub fn weighted_jaccard_similarity<'a>( - mut set_a: &'a Occurrences, - mut set_b: &'a Occurrences, -) -> f32 { - if set_a.table.len() > set_b.table.len() { - std::mem::swap(&mut set_a, &mut set_b); - } - - let mut numerator = 0; - let mut denominator_a = 0; - let mut used_count_b = 0; - for entry_a in set_a.table.iter() { - let count_a = entry_a.count; - let count_b = set_b.get_count(entry_a.hash); - numerator += count_a.min(count_b); - denominator_a += count_a.max(count_b); - used_count_b += count_b; - } - - let denominator = denominator_a + (set_b.total_count - used_count_b); - if denominator == 0 { - 0.0 - } else { - numerator as f32 / denominator as f32 - } -} - -pub fn weighted_overlap_coefficient<'a>( - mut set_a: &'a Occurrences, - mut set_b: &'a Occurrences, -) -> f32 { - if set_a.table.len() > set_b.table.len() { - std::mem::swap(&mut set_a, &mut set_b); - } - - let mut numerator = 0; - for entry_a in set_a.table.iter() { - let count_a = entry_a.count; - let count_b = set_b.get_count(entry_a.hash); - numerator += count_a.min(count_b); - } - - let denominator = set_a.total_count.min(set_b.total_count); - if denominator == 0 { - 0.0 - } else { - numerator as f32 / denominator as f32 - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_split_identifier() { - assert_eq!(split_identifier("snake_case"), vec!["snake", "case"]); - assert_eq!(split_identifier("kebab-case"), vec!["kebab", "case"]); - assert_eq!(split_identifier("PascalCase"), vec!["Pascal", 
"Case"]); - assert_eq!(split_identifier("camelCase"), vec!["camel", "Case"]); - assert_eq!(split_identifier("XMLParser"), vec!["XML", "Parser"]); - } - - #[test] - fn test_similarity_functions() { - // 10 identifier parts, 8 unique - // Repeats: 2 "outline", 2 "items" - let set_a = Occurrences::within_string( - "let mut outline_items = query_outline_items(&language, &tree, &source);", - ); - // 14 identifier parts, 11 unique - // Repeats: 2 "outline", 2 "language", 2 "tree" - let set_b = Occurrences::within_string( - "pub fn query_outline_items(language: &Language, tree: &Tree, source: &str) -> Vec {", - ); - - // 6 overlaps: "outline", "items", "query", "language", "tree", "source" - // 7 non-overlaps: "let", "mut", "pub", "fn", "vec", "item", "str" - assert_eq!(jaccard_similarity(&set_a, &set_b), 6.0 / (6.0 + 7.0)); - - // Numerator is one more than before due to both having 2 "outline". - // Denominator is the same except for 3 more due to the non-overlapping duplicates - assert_eq!( - weighted_jaccard_similarity(&set_a, &set_b), - 7.0 / (7.0 + 7.0 + 3.0) - ); - - // Numerator is the same as jaccard_similarity. Denominator is the size of the smaller set, 8. - assert_eq!(overlap_coefficient(&set_a, &set_b), 6.0 / 8.0); - - // Numerator is the same as weighted_jaccard_similarity. Denominator is the total weight of - // the smaller set, 10. - assert_eq!(weighted_overlap_coefficient(&set_a, &set_b), 7.0 / 10.0); - } - - #[test] - fn test_iter_path_without_extension() { - let mut iter = iter_path_without_extension(Path::new("")); - assert_eq!(iter.next(), None); - - let iter = iter_path_without_extension(Path::new("foo")); - assert_eq!(iter.collect::>(), ["foo"]); - - let iter = iter_path_without_extension(Path::new("foo/bar.txt")); - assert_eq!(iter.collect::>(), ["foo", "bar"]); - - let iter = iter_path_without_extension(Path::new("foo/bar/baz.txt")); - assert_eq!(iter.collect::>(), ["foo", "bar", "baz"]); - } -} diff --git a/crates/edit_prediction_types/Cargo.toml b/crates/edit_prediction_types/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..00a8577911af0afd012535fd324a68af8fd70391 --- /dev/null +++ b/crates/edit_prediction_types/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "edit_prediction_types" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/edit_prediction_types.rs" + +[dependencies] +client.workspace = true +gpui.workspace = true +language.workspace = true +text.workspace = true diff --git a/crates/zeta/LICENSE-GPL b/crates/edit_prediction_types/LICENSE-GPL similarity index 100% rename from crates/zeta/LICENSE-GPL rename to crates/edit_prediction_types/LICENSE-GPL diff --git a/crates/edit_prediction_types/src/edit_prediction_types.rs b/crates/edit_prediction_types/src/edit_prediction_types.rs new file mode 100644 index 0000000000000000000000000000000000000000..5a37aba59923598b20becd91f07633e409b2bdb7 --- /dev/null +++ b/crates/edit_prediction_types/src/edit_prediction_types.rs @@ -0,0 +1,278 @@ +use std::{ops::Range, sync::Arc}; + +use client::EditPredictionUsage; +use gpui::{App, Context, Entity, SharedString}; +use language::{Anchor, Buffer, OffsetRangeExt}; + +// TODO: Find a better home for `Direction`. +// +// This should live in an ancestor crate of `editor` and `edit_prediction`, +// but at time of writing there isn't an obvious spot. 
+#[derive(Copy, Clone, PartialEq, Eq)] +pub enum Direction { + Prev, + Next, +} + +#[derive(Clone)] +pub enum EditPrediction { + /// Edits within the buffer that requested the prediction + Local { + id: Option, + edits: Vec<(Range, Arc)>, + edit_preview: Option, + }, + /// Jump to a different file from the one that requested the prediction + Jump { + id: Option, + snapshot: language::BufferSnapshot, + target: language::Anchor, + }, +} + +pub enum DataCollectionState { + /// The provider doesn't support data collection. + Unsupported, + /// Data collection is enabled. + Enabled { is_project_open_source: bool }, + /// Data collection is disabled or unanswered. + Disabled { is_project_open_source: bool }, +} + +impl DataCollectionState { + pub fn is_supported(&self) -> bool { + !matches!(self, DataCollectionState::Unsupported) + } + + pub fn is_enabled(&self) -> bool { + matches!(self, DataCollectionState::Enabled { .. }) + } + + pub fn is_project_open_source(&self) -> bool { + match self { + Self::Enabled { + is_project_open_source, + } + | Self::Disabled { + is_project_open_source, + } => *is_project_open_source, + _ => false, + } + } +} + +pub trait EditPredictionDelegate: 'static + Sized { + fn name() -> &'static str; + fn display_name() -> &'static str; + fn show_predictions_in_menu() -> bool; + fn show_tab_accept_marker() -> bool { + false + } + fn supports_jump_to_edit() -> bool { + true + } + + fn data_collection_state(&self, _cx: &App) -> DataCollectionState { + DataCollectionState::Unsupported + } + + fn usage(&self, _cx: &App) -> Option { + None + } + + fn toggle_data_collection(&mut self, _cx: &mut App) {} + fn is_enabled( + &self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &App, + ) -> bool; + fn is_refreshing(&self, cx: &App) -> bool; + fn refresh( + &mut self, + buffer: Entity, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut Context, + ); + fn accept(&mut self, cx: &mut Context); + fn discard(&mut self, cx: &mut Context); + fn did_show(&mut self, _cx: &mut Context) {} + fn suggest( + &mut self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut Context, + ) -> Option; +} + +pub trait EditPredictionDelegateHandle { + fn name(&self) -> &'static str; + fn display_name(&self) -> &'static str; + fn is_enabled( + &self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &App, + ) -> bool; + fn show_predictions_in_menu(&self) -> bool; + fn show_tab_accept_marker(&self) -> bool; + fn supports_jump_to_edit(&self) -> bool; + fn data_collection_state(&self, cx: &App) -> DataCollectionState; + fn usage(&self, cx: &App) -> Option; + fn toggle_data_collection(&self, cx: &mut App); + fn is_refreshing(&self, cx: &App) -> bool; + fn refresh( + &self, + buffer: Entity, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut App, + ); + fn did_show(&self, cx: &mut App); + fn accept(&self, cx: &mut App); + fn discard(&self, cx: &mut App); + fn suggest( + &self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut App, + ) -> Option; +} + +impl EditPredictionDelegateHandle for Entity +where + T: EditPredictionDelegate, +{ + fn name(&self) -> &'static str { + T::name() + } + + fn display_name(&self) -> &'static str { + T::display_name() + } + + fn show_predictions_in_menu(&self) -> bool { + T::show_predictions_in_menu() + } + + fn show_tab_accept_marker(&self) -> bool { + T::show_tab_accept_marker() + } + + fn supports_jump_to_edit(&self) -> bool { + T::supports_jump_to_edit() + } + + fn 
data_collection_state(&self, cx: &App) -> DataCollectionState { + self.read(cx).data_collection_state(cx) + } + + fn usage(&self, cx: &App) -> Option { + self.read(cx).usage(cx) + } + + fn toggle_data_collection(&self, cx: &mut App) { + self.update(cx, |this, cx| this.toggle_data_collection(cx)) + } + + fn is_enabled( + &self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &App, + ) -> bool { + self.read(cx).is_enabled(buffer, cursor_position, cx) + } + + fn is_refreshing(&self, cx: &App) -> bool { + self.read(cx).is_refreshing(cx) + } + + fn refresh( + &self, + buffer: Entity, + cursor_position: language::Anchor, + debounce: bool, + cx: &mut App, + ) { + self.update(cx, |this, cx| { + this.refresh(buffer, cursor_position, debounce, cx) + }) + } + + fn accept(&self, cx: &mut App) { + self.update(cx, |this, cx| this.accept(cx)) + } + + fn discard(&self, cx: &mut App) { + self.update(cx, |this, cx| this.discard(cx)) + } + + fn did_show(&self, cx: &mut App) { + self.update(cx, |this, cx| this.did_show(cx)) + } + + fn suggest( + &self, + buffer: &Entity, + cursor_position: language::Anchor, + cx: &mut App, + ) -> Option { + self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx)) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum EditPredictionGranularity { + Word, + Line, + Full, +} +/// Returns edits updated based on user edits since the old snapshot. None is returned if any user +/// edit is not a prefix of a predicted insertion. +pub fn interpolate_edits( + old_snapshot: &text::BufferSnapshot, + new_snapshot: &text::BufferSnapshot, + current_edits: &[(Range, Arc)], +) -> Option, Arc)>> { + let mut edits = Vec::new(); + + let mut model_edits = current_edits.iter().peekable(); + for user_edit in new_snapshot.edits_since::(&old_snapshot.version) { + while let Some((model_old_range, _)) = model_edits.peek() { + let model_old_range = model_old_range.to_offset(old_snapshot); + if model_old_range.end < user_edit.old.start { + let (model_old_range, model_new_text) = model_edits.next().unwrap(); + edits.push((model_old_range.clone(), model_new_text.clone())); + } else { + break; + } + } + + if let Some((model_old_range, model_new_text)) = model_edits.peek() { + let model_old_offset_range = model_old_range.to_offset(old_snapshot); + if user_edit.old == model_old_offset_range { + let user_new_text = new_snapshot + .text_for_range(user_edit.new.clone()) + .collect::(); + + if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) { + if !model_suffix.is_empty() { + let anchor = old_snapshot.anchor_after(user_edit.old.end); + edits.push((anchor..anchor, model_suffix.into())); + } + + model_edits.next(); + continue; + } + } + } + + return None; + } + + edits.extend(model_edits.cloned()); + + if edits.is_empty() { None } else { Some(edits) } +} diff --git a/crates/edit_prediction_button/Cargo.toml b/crates/edit_prediction_ui/Cargo.toml similarity index 71% rename from crates/edit_prediction_button/Cargo.toml rename to crates/edit_prediction_ui/Cargo.toml index 189db7f7bac3eaea36a154424c4e7702f1387d24..b406a450601bef908c27a48be14fe9b1f2204c08 100644 --- a/crates/edit_prediction_button/Cargo.toml +++ b/crates/edit_prediction_ui/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "edit_prediction_button" +name = "edit_prediction_ui" version = "0.1.0" edition.workspace = true publish.workspace = true @@ -9,32 +9,45 @@ license = "GPL-3.0-or-later" workspace = true [lib] -path = "src/edit_prediction_button.rs" +path = "src/edit_prediction_ui.rs" doctest = false 
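The `interpolate_edits` helper added above in `edit_prediction_types` follows one rule per user edit: either it falls entirely before the next predicted edit (keep the prediction as-is), or it must be a prefix of a predicted insertion at exactly the same position (keep only the still-unexpected suffix, or consume the edit if it was typed in full); anything else invalidates the whole prediction. A string-level sketch of that prefix logic, without buffer snapshots or anchors:

```rust
#[derive(Debug, PartialEq)]
enum Interpolation<'a> {
    /// The user typed a prefix of the prediction; this much is still worth showing.
    Remaining(&'a str),
    /// The user typed exactly what was predicted; the edit is consumed.
    Consumed,
    /// The user typed something else; the prediction no longer applies.
    Invalidated,
}

fn interpolate<'a>(predicted: &'a str, typed: &str) -> Interpolation<'a> {
    match predicted.strip_prefix(typed) {
        Some("") => Interpolation::Consumed,
        Some(suffix) => Interpolation::Remaining(suffix),
        None => Interpolation::Invalidated,
    }
}

fn main() {
    assert_eq!(
        interpolate("println!(\"hi\")", "println!("),
        Interpolation::Remaining("\"hi\")")
    );
    assert_eq!(interpolate("foo()", "foo()"), Interpolation::Consumed);
    assert_eq!(interpolate("foo()", "bar"), Interpolation::Invalidated);
}
```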
[dependencies] anyhow.workspace = true +buffer_diff.workspace = true +git.workspace = true +log.workspace = true +time.workspace = true client.workspace = true cloud_llm_client.workspace = true codestral.workspace = true +command_palette_hooks.workspace = true copilot.workspace = true +edit_prediction_types.workspace = true +edit_prediction.workspace = true editor.workspace = true feature_flags.workspace = true fs.workspace = true +futures.workspace = true gpui.workspace = true indoc.workspace = true -edit_prediction.workspace = true language.workspace = true +markdown.workspace = true +menu.workspace = true +multi_buffer.workspace = true paths.workspace = true project.workspace = true regex.workspace = true settings.workspace = true supermaven.workspace = true telemetry.workspace = true +text.workspace = true +theme.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true zed_actions.workspace = true -zeta.workspace = true +zeta_prompt.workspace = true [dev-dependencies] copilot = { workspace = true, features = ["test-support"] } diff --git a/crates/zeta2/LICENSE-GPL b/crates/edit_prediction_ui/LICENSE-GPL similarity index 100% rename from crates/zeta2/LICENSE-GPL rename to crates/edit_prediction_ui/LICENSE-GPL diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs similarity index 60% rename from crates/edit_prediction_button/src/edit_prediction_button.rs rename to crates/edit_prediction_ui/src/edit_prediction_button.rs index 6e9000bc62eea94d5c48dca2416781f46428522c..0dcea477200eef9d1eeb6adeff98f47332d751ca 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -1,15 +1,21 @@ use anyhow::Result; use client::{Client, UserStore, zed_urls}; use cloud_llm_client::UsageLimit; -use codestral::CodestralCompletionProvider; +use codestral::CodestralEditPredictionDelegate; use copilot::{Copilot, Status}; -use editor::{Editor, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll}; -use feature_flags::{FeatureFlagAppExt, PredictEditsRateCompletionsFeatureFlag}; +use edit_prediction::{ + EditPredictionStore, MercuryFeatureFlag, SweepFeatureFlag, Zeta2FeatureFlag, +}; +use edit_prediction_types::EditPredictionDelegateHandle; +use editor::{ + Editor, MultiBufferOffset, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll, +}; +use feature_flags::FeatureFlagAppExt; use fs::Fs; use gpui::{ Action, Animation, AnimationExt, App, AsyncWindowContext, Corner, Entity, FocusHandle, Focusable, IntoElement, ParentElement, Render, Subscription, WeakEntity, actions, div, - pulsating_between, + ease_in_out, pulsating_between, }; use indoc::indoc; use language::{ @@ -18,7 +24,12 @@ use language::{ }; use project::DisableAiSettings; use regex::Regex; -use settings::{Settings, SettingsStore, update_settings_file}; +use settings::{ + EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME, + EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, + EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, Settings, SettingsStore, + update_settings_file, +}; use std::{ sync::{Arc, LazyLock}, time::Duration, @@ -28,12 +39,16 @@ use ui::{ Clickable, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, IconButton, IconButtonShape, Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*, }; +use util::ResultExt as _; use workspace::{ StatusItemView, Toast, Workspace, create_and_open_local_file, 
item::ItemHandle, notifications::NotificationId, }; -use zed_actions::OpenBrowser; -use zeta::RateCompletions; +use zed_actions::{OpenBrowser, OpenSettingsAt}; + +use crate::{ + CaptureExample, RatePredictions, rate_prediction_modal::PredictEditsRatePredictionsFeatureFlag, +}; actions!( edit_prediction, @@ -43,7 +58,8 @@ actions!( ] ); -const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot"; +const COPILOT_SETTINGS_PATH: &str = "/settings/copilot"; +const COPILOT_SETTINGS_URL: &str = concat!("https://github.com", "/settings/copilot"); const PRIVACY_DOCS: &str = "https://zed.dev/docs/ai/privacy-and-security"; struct CopilotErrorToast; @@ -55,7 +71,7 @@ pub struct EditPredictionButton { editor_focus_handle: Option, language: Option>, file: Option>, - edit_prediction_provider: Option>, + edit_prediction_provider: Option>, fs: Arc, user_store: Entity, popover_menu_handle: PopoverMenuHandle, @@ -78,8 +94,6 @@ impl Render for EditPredictionButton { let all_language_settings = all_language_settings(None, cx); match all_language_settings.edit_predictions.provider { - EditPredictionProvider::None => div().hidden(), - EditPredictionProvider::Copilot => { let Some(copilot) = Copilot::global(cx) else { return div().hidden(); @@ -128,20 +142,21 @@ impl Render for EditPredictionButton { }), ); } - let this = cx.entity(); + let this = cx.weak_entity(); div().child( PopoverMenu::new("copilot") .menu(move |window, cx| { let current_status = Copilot::global(cx)?.read(cx).status(); - Some(match current_status { + match current_status { Status::Authorized => this.update(cx, |this, cx| { this.build_copilot_context_menu(window, cx) }), _ => this.update(cx, |this, cx| { this.build_copilot_start_menu(window, cx) }), - }) + } + .ok() }) .anchor(Corner::BottomRight) .trigger_with_tooltip( @@ -182,7 +197,7 @@ impl Render for EditPredictionButton { let icon = status.to_icon(); let tooltip_text = status.to_tooltip(); let has_menu = status.has_menu(); - let this = cx.entity(); + let this = cx.weak_entity(); let fs = self.fs.clone(); div().child( @@ -209,9 +224,11 @@ impl Render for EditPredictionButton { ) })) } - SupermavenButtonStatus::Ready => Some(this.update(cx, |this, cx| { - this.build_supermaven_context_menu(window, cx) - })), + SupermavenButtonStatus::Ready => this + .update(cx, |this, cx| { + this.build_supermaven_context_menu(window, cx) + }) + .ok(), _ => None, }) .anchor(Corner::BottomRight) @@ -231,45 +248,22 @@ impl Render for EditPredictionButton { EditPredictionProvider::Codestral => { let enabled = self.editor_enabled.unwrap_or(true); - let has_api_key = CodestralCompletionProvider::has_api_key(cx); - let fs = self.fs.clone(); - let this = cx.entity(); + let has_api_key = CodestralEditPredictionDelegate::has_api_key(cx); + let this = cx.weak_entity(); + + let tooltip_meta = if has_api_key { + "Powered by Codestral" + } else { + "Missing API key for Codestral" + }; div().child( PopoverMenu::new("codestral") .menu(move |window, cx| { - if has_api_key { - Some(this.update(cx, |this, cx| { - this.build_codestral_context_menu(window, cx) - })) - } else { - Some(ContextMenu::build(window, cx, |menu, _, _| { - let fs = fs.clone(); - - menu.entry( - "Configure Codestral API Key", - None, - move |window, cx| { - window.dispatch_action( - zed_actions::agent::OpenSettings.boxed_clone(), - cx, - ); - }, - ) - .separator() - .entry( - "Use Zed AI instead", - None, - move |_, cx| { - set_completion_provider( - fs.clone(), - cx, - EditPredictionProvider::Zed, - ) - }, - ) - })) - } + 
this.update(cx, |this, cx| { + this.build_codestral_context_menu(window, cx) + }) + .ok() }) .anchor(Corner::BottomRight) .trigger_with_tooltip( @@ -287,30 +281,69 @@ impl Render for EditPredictionButton { cx.theme().colors().status_bar_background, )) }), - move |_window, cx| Tooltip::for_action("Codestral", &ToggleMenu, cx), + move |_window, cx| { + Tooltip::with_meta( + "Edit Prediction", + Some(&ToggleMenu), + tooltip_meta, + cx, + ) + }, ) .with_handle(self.popover_menu_handle.clone()), ) } - - EditPredictionProvider::Zed => { + provider @ (EditPredictionProvider::Experimental(_) | EditPredictionProvider::Zed) => { let enabled = self.editor_enabled.unwrap_or(true); - let zeta_icon = if enabled { - IconName::ZedPredict - } else { - IconName::ZedPredictDisabled + let ep_icon; + let tooltip_meta; + let mut missing_token = false; + + match provider { + EditPredictionProvider::Experimental( + EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, + ) => { + ep_icon = IconName::SweepAi; + tooltip_meta = if missing_token { + "Missing API key for Sweep" + } else { + "Powered by Sweep" + }; + missing_token = edit_prediction::EditPredictionStore::try_global(cx) + .is_some_and(|ep_store| !ep_store.read(cx).has_sweep_api_token(cx)); + } + EditPredictionProvider::Experimental( + EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME, + ) => { + ep_icon = IconName::Inception; + missing_token = edit_prediction::EditPredictionStore::try_global(cx) + .is_some_and(|ep_store| !ep_store.read(cx).has_mercury_api_token(cx)); + tooltip_meta = if missing_token { + "Missing API key for Mercury" + } else { + "Powered by Mercury" + }; + } + _ => { + ep_icon = if enabled { + IconName::ZedPredict + } else { + IconName::ZedPredictDisabled + }; + tooltip_meta = "Powered by Zeta" + } }; - if zeta::should_show_upsell_modal() { + if edit_prediction::should_show_upsell_modal() { let tooltip_meta = if self.user_store.read(cx).current_user().is_some() { "Choose a Plan" } else { - "Sign In" + "Sign In To Use" }; return div().child( - IconButton::new("zed-predict-pending-button", zeta_icon) + IconButton::new("zed-predict-pending-button", ep_icon) .shape(IconButtonShape::Square) .indicator(Indicator::dot().color(Color::Muted)) .indicator_border_color(Some(cx.theme().colors().status_bar_background)) @@ -341,50 +374,73 @@ impl Render for EditPredictionButton { } let show_editor_predictions = self.editor_show_predictions; + let user = self.user_store.read(cx).current_user(); + + let indicator_color = if missing_token { + Some(Color::Error) + } else if enabled && (!show_editor_predictions || over_limit) { + Some(if over_limit { + Color::Error + } else { + Color::Muted + }) + } else { + None + }; - let icon_button = IconButton::new("zed-predict-pending-button", zeta_icon) + let icon_button = IconButton::new("zed-predict-pending-button", ep_icon) .shape(IconButtonShape::Square) - .when( - enabled && (!show_editor_predictions || over_limit), - |this| { - this.indicator(Indicator::dot().when_else( - over_limit, - |dot| dot.color(Color::Error), - |dot| dot.color(Color::Muted), - )) + .when_some(indicator_color, |this, color| { + this.indicator(Indicator::dot().color(color)) .indicator_border_color(Some(cx.theme().colors().status_bar_background)) - }, - ) + }) .when(!self.popover_menu_handle.is_deployed(), |element| { + let user = user.clone(); + element.tooltip(move |_window, cx| { - if enabled { + let description = if enabled { if show_editor_predictions { - Tooltip::for_action("Edit Prediction", &ToggleMenu, cx) + tooltip_meta + } else if 
user.is_none() { + "Sign In To Use" } else { - Tooltip::with_meta( - "Edit Prediction", - Some(&ToggleMenu), - "Hidden For This File", - cx, - ) + "Hidden For This File" } } else { - Tooltip::with_meta( - "Edit Prediction", - Some(&ToggleMenu), - "Disabled For This File", - cx, - ) - } + "Disabled For This File" + }; + + Tooltip::with_meta( + "Edit Prediction", + Some(&ToggleMenu), + description, + cx, + ) }) }); let this = cx.weak_entity(); - let mut popover_menu = PopoverMenu::new("zeta") - .menu(move |window, cx| { - this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx)) + let mut popover_menu = PopoverMenu::new("edit-prediction") + .when(user.is_some(), |popover_menu| { + let this = this.clone(); + + popover_menu.menu(move |window, cx| { + this.update(cx, |this, cx| { + this.build_edit_prediction_context_menu(provider, window, cx) + }) + .ok() + }) + }) + .when(user.is_none(), |popover_menu| { + let this = this.clone(); + + popover_menu.menu(move |window, cx| { + this.update(cx, |this, cx| { + this.build_zeta_upsell_context_menu(window, cx) + }) .ok() + }) }) .anchor(Corner::BottomRight) .with_handle(self.popover_menu_handle.clone()); @@ -410,6 +466,8 @@ impl Render for EditPredictionButton { div().child(popover_menu.into_any_element()) } + + EditPredictionProvider::None => div().hidden(), } } } @@ -429,7 +487,22 @@ impl EditPredictionButton { cx.observe_global::(move |_, cx| cx.notify()) .detach(); - CodestralCompletionProvider::ensure_api_key_loaded(client.http_client(), cx); + cx.observe_global::(move |_, cx| cx.notify()) + .detach(); + + let sweep_api_token_task = edit_prediction::sweep_ai::load_sweep_api_token(cx); + let mercury_api_token_task = edit_prediction::mercury::load_mercury_api_token(cx); + + cx.spawn(async move |this, cx| { + _ = futures::join!(sweep_api_token_task, mercury_api_token_task); + this.update(cx, |_, cx| { + cx.notify(); + }) + .ok(); + }) + .detach(); + + CodestralEditPredictionDelegate::ensure_api_key_loaded(client.http_client(), cx); Self { editor_subscription: None, @@ -445,11 +518,17 @@ impl EditPredictionButton { } } - fn get_available_providers(&self, cx: &App) -> Vec { + fn get_available_providers(&self, cx: &mut App) -> Vec { let mut providers = Vec::new(); providers.push(EditPredictionProvider::Zed); + if cx.has_flag::() { + providers.push(EditPredictionProvider::Experimental( + EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, + )); + } + if let Some(copilot) = Copilot::global(cx) { if matches!(copilot.read(cx).status(), Status::Authorized) { providers.push(EditPredictionProvider::Copilot); @@ -464,10 +543,30 @@ impl EditPredictionButton { } } - if CodestralCompletionProvider::has_api_key(cx) { + if CodestralEditPredictionDelegate::has_api_key(cx) { providers.push(EditPredictionProvider::Codestral); } + if cx.has_flag::() + && edit_prediction::sweep_ai::sweep_api_token(cx) + .read(cx) + .has_key() + { + providers.push(EditPredictionProvider::Experimental( + EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, + )); + } + + if cx.has_flag::() + && edit_prediction::mercury::mercury_api_token(cx) + .read(cx) + .has_key() + { + providers.push(EditPredictionProvider::Experimental( + EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME, + )); + } + providers } @@ -475,53 +574,48 @@ impl EditPredictionButton { &self, mut menu: ContextMenu, current_provider: EditPredictionProvider, - cx: &App, + cx: &mut App, ) -> ContextMenu { let available_providers = self.get_available_providers(cx); - let other_providers: Vec<_> = available_providers + let 
providers: Vec<_> = available_providers .into_iter() - .filter(|p| *p != current_provider && *p != EditPredictionProvider::None) + .filter(|p| *p != EditPredictionProvider::None) .collect(); - if !other_providers.is_empty() { - menu = menu.separator().header("Switch Providers"); + if !providers.is_empty() { + menu = menu.separator().header("Providers"); - for provider in other_providers { + for provider in providers { + let is_current = provider == current_provider; let fs = self.fs.clone(); - menu = match provider { - EditPredictionProvider::Zed => menu.item( - ContextMenuEntry::new("Zed AI") - .documentation_aside( - DocumentationSide::Left, - DocumentationEdge::Top, - |_| { - Label::new("Zed's edit prediction is powered by Zeta, an open-source, dataset mode.") - .into_any_element() - }, - ) - .handler(move |_, cx| { - set_completion_provider(fs.clone(), cx, provider); - }), - ), - EditPredictionProvider::Copilot => { - menu.entry("GitHub Copilot", None, move |_, cx| { - set_completion_provider(fs.clone(), cx, provider); - }) - } - EditPredictionProvider::Supermaven => { - menu.entry("Supermaven", None, move |_, cx| { - set_completion_provider(fs.clone(), cx, provider); - }) + let name = match provider { + EditPredictionProvider::Zed => "Zed AI", + EditPredictionProvider::Copilot => "GitHub Copilot", + EditPredictionProvider::Supermaven => "Supermaven", + EditPredictionProvider::Codestral => "Codestral", + EditPredictionProvider::Experimental( + EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, + ) => "Sweep", + EditPredictionProvider::Experimental( + EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME, + ) => "Mercury", + EditPredictionProvider::Experimental( + EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, + ) => "Zeta2", + EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => { + continue; } - EditPredictionProvider::Codestral => { - menu.entry("Codestral", None, move |_, cx| { - set_completion_provider(fs.clone(), cx, provider); - }) - } - EditPredictionProvider::None => continue, }; + + menu = menu.item( + ContextMenuEntry::new(name) + .toggleable(IconPosition::Start, is_current) + .handler(move |_, cx| { + set_completion_provider(fs.clone(), cx, provider); + }), + ) } } @@ -626,14 +720,7 @@ impl EditPredictionButton { let subtle_mode = matches!(current_mode, EditPredictionsMode::Subtle); let eager_mode = matches!(current_mode, EditPredictionsMode::Eager); - if matches!( - provider, - EditPredictionProvider::Zed - | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven - | EditPredictionProvider::Codestral - ) { - menu = menu + menu = menu .separator() .header("Display Modes") .item( @@ -662,104 +749,111 @@ impl EditPredictionButton { } }), ); - } menu = menu.separator().header("Privacy"); - if let Some(provider) = &self.edit_prediction_provider { - let data_collection = provider.data_collection_state(cx); - - if data_collection.is_supported() { - let provider = provider.clone(); - let enabled = data_collection.is_enabled(); - let is_open_source = data_collection.is_project_open_source(); - let is_collecting = data_collection.is_enabled(); - let (icon_name, icon_color) = if is_open_source && is_collecting { - (IconName::Check, Color::Success) - } else { - (IconName::Check, Color::Accent) - }; - - menu = menu.item( - ContextMenuEntry::new("Training Data Collection") - .toggleable(IconPosition::Start, data_collection.is_enabled()) - .icon(icon_name) - .icon_color(icon_color) - .documentation_aside(DocumentationSide::Left, DocumentationEdge::Top, 
move |cx| { - let (msg, label_color, icon_name, icon_color) = match (is_open_source, is_collecting) { - (true, true) => ( - "Project identified as open source, and you're sharing data.", - Color::Default, - IconName::Check, - Color::Success, - ), - (true, false) => ( - "Project identified as open source, but you're not sharing data.", - Color::Muted, - IconName::Close, - Color::Muted, - ), - (false, true) => ( - "Project not identified as open source. No data captured.", - Color::Muted, - IconName::Close, - Color::Muted, - ), - (false, false) => ( - "Project not identified as open source, and setting turned off.", - Color::Muted, - IconName::Close, - Color::Muted, - ), - }; - v_flex() - .gap_2() - .child( - Label::new(indoc!{ - "Help us improve our open dataset model by sharing data from open source repositories. \ - Zed must detect a license file in your repo for this setting to take effect. \ - Files with sensitive data and secrets are excluded by default." - }) - ) - .child( - h_flex() - .items_start() - .pt_2() - .pr_1() - .flex_1() - .gap_1p5() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .child(h_flex().flex_shrink_0().h(line_height).child(Icon::new(icon_name).size(IconSize::XSmall).color(icon_color))) - .child(div().child(msg).w_full().text_sm().text_color(label_color.color(cx))) - ) - .into_any_element() - }) - .handler(move |_, cx| { - provider.toggle_data_collection(cx); - - if !enabled { - telemetry::event!( - "Data Collection Enabled", - source = "Edit Prediction Status Menu" - ); - } else { - telemetry::event!( - "Data Collection Disabled", - source = "Edit Prediction Status Menu" - ); - } - }) - ); + if matches!( + provider, + EditPredictionProvider::Zed + | EditPredictionProvider::Experimental( + EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, + ) + ) { + if let Some(provider) = &self.edit_prediction_provider { + let data_collection = provider.data_collection_state(cx); + + if data_collection.is_supported() { + let provider = provider.clone(); + let enabled = data_collection.is_enabled(); + let is_open_source = data_collection.is_project_open_source(); + let is_collecting = data_collection.is_enabled(); + let (icon_name, icon_color) = if is_open_source && is_collecting { + (IconName::Check, Color::Success) + } else { + (IconName::Check, Color::Accent) + }; - if is_collecting && !is_open_source { menu = menu.item( - ContextMenuEntry::new("No data captured.") - .disabled(true) - .icon(IconName::Close) - .icon_color(Color::Error) - .icon_size(IconSize::Small), + ContextMenuEntry::new("Training Data Collection") + .toggleable(IconPosition::Start, data_collection.is_enabled()) + .icon(icon_name) + .icon_color(icon_color) + .documentation_aside(DocumentationSide::Left, DocumentationEdge::Top, move |cx| { + let (msg, label_color, icon_name, icon_color) = match (is_open_source, is_collecting) { + (true, true) => ( + "Project identified as open source, and you're sharing data.", + Color::Default, + IconName::Check, + Color::Success, + ), + (true, false) => ( + "Project identified as open source, but you're not sharing data.", + Color::Muted, + IconName::Close, + Color::Muted, + ), + (false, true) => ( + "Project not identified as open source. 
No data captured.", + Color::Muted, + IconName::Close, + Color::Muted, + ), + (false, false) => ( + "Project not identified as open source, and setting turned off.", + Color::Muted, + IconName::Close, + Color::Muted, + ), + }; + v_flex() + .gap_2() + .child( + Label::new(indoc!{ + "Help us improve our open dataset model by sharing data from open source repositories. \ + Zed must detect a license file in your repo for this setting to take effect. \ + Files with sensitive data and secrets are excluded by default." + }) + ) + .child( + h_flex() + .items_start() + .pt_2() + .pr_1() + .flex_1() + .gap_1p5() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child(h_flex().flex_shrink_0().h(line_height).child(Icon::new(icon_name).size(IconSize::XSmall).color(icon_color))) + .child(div().child(msg).w_full().text_sm().text_color(label_color.color(cx))) + ) + .into_any_element() + }) + .handler(move |_, cx| { + provider.toggle_data_collection(cx); + + if !enabled { + telemetry::event!( + "Data Collection Enabled", + source = "Edit Prediction Status Menu" + ); + } else { + telemetry::event!( + "Data Collection Disabled", + source = "Edit Prediction Status Menu" + ); + } + }) ); + + if is_collecting && !is_open_source { + menu = menu.item( + ContextMenuEntry::new("No data captured.") + .disabled(true) + .icon(IconName::Close) + .icon_color(Color::Error) + .icon_size(IconSize::Small), + ); + } } } } @@ -819,8 +913,14 @@ impl EditPredictionButton { ) .context(editor_focus_handle) .when( - cx.has_flag::(), - |this| this.action("Rate Completions", RateCompletions.boxed_clone()), + cx.has_flag::(), + |this| { + this.action( + "Capture Edit Prediction Example", + CaptureExample.boxed_clone(), + ) + .action("Rate Predictions", RatePredictions.boxed_clone()) + }, ); } @@ -832,6 +932,16 @@ impl EditPredictionButton { window: &mut Window, cx: &mut Context, ) -> Entity { + let all_language_settings = all_language_settings(None, cx); + let copilot_config = copilot::copilot_chat::CopilotChatConfiguration { + enterprise_uri: all_language_settings + .edit_predictions + .copilot + .enterprise_uri + .clone(), + }; + let settings_url = copilot_settings_url(copilot_config.enterprise_uri.as_deref()); + ContextMenu::build(window, cx, |menu, window, cx| { let menu = self.build_language_settings_menu(menu, window, cx); let menu = @@ -840,10 +950,7 @@ impl EditPredictionButton { menu.separator() .link( "Go to Copilot Settings", - OpenBrowser { - url: COPILOT_SETTINGS_URL.to_string(), - } - .boxed_clone(), + OpenBrowser { url: settings_url }.boxed_clone(), ) .action("Sign Out", copilot::SignOut.boxed_clone()) }) @@ -874,15 +981,13 @@ impl EditPredictionButton { let menu = self.add_provider_switching_section(menu, EditPredictionProvider::Codestral, cx); - menu.separator() - .entry("Configure Codestral API Key", None, move |window, cx| { - window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx); - }) + menu }) } - fn build_zeta_context_menu( + fn build_edit_prediction_context_menu( &self, + provider: EditPredictionProvider, window: &mut Window, cx: &mut Context, ) -> Entity { @@ -969,8 +1074,97 @@ impl EditPredictionButton { .separator(); } - let menu = self.build_language_settings_menu(menu, window, cx); - let menu = self.add_provider_switching_section(menu, EditPredictionProvider::Zed, cx); + menu = self.build_language_settings_menu(menu, window, cx); + + if cx.has_flag::() { + let settings = all_language_settings(None, cx); + let context_retrieval = 
settings.edit_predictions.use_context; + menu = menu.separator().header("Context Retrieval").item( + ContextMenuEntry::new("Enable Context Retrieval") + .toggleable(IconPosition::Start, context_retrieval) + .action(workspace::ToggleEditPrediction.boxed_clone()) + .handler({ + let fs = self.fs.clone(); + move |_, cx| { + update_settings_file(fs.clone(), cx, move |settings, _| { + settings + .project + .all_languages + .features + .get_or_insert_default() + .experimental_edit_prediction_context_retrieval = + Some(!context_retrieval) + }); + } + }), + ); + } + + menu = self.add_provider_switching_section(menu, provider, cx); + menu = menu.separator().item( + ContextMenuEntry::new("Configure Providers") + .icon(IconName::Settings) + .icon_position(IconPosition::Start) + .icon_color(Color::Muted) + .handler(move |window, cx| { + window.dispatch_action( + OpenSettingsAt { + path: "edit_predictions.providers".to_string(), + } + .boxed_clone(), + cx, + ); + }), + ); + + menu + }) + } + + fn build_zeta_upsell_context_menu( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + ContextMenu::build(window, cx, |mut menu, _window, cx| { + menu = menu + .custom_row(move |_window, cx| { + let description = indoc! { + "You get 2,000 accepted suggestions at every keystroke for free, \ + powered by Zeta, our open-source, open-data model" + }; + + v_flex() + .max_w_64() + .h(rems_from_px(148.)) + .child(render_zeta_tab_animation(cx)) + .child(Label::new("Edit Prediction")) + .child( + Label::new(description) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element() + }) + .separator() + .entry("Sign In & Start Using", None, |window, cx| { + let client = Client::global(cx); + window + .spawn(cx, async move |cx| { + client + .sign_in_with_optional_connect(true, &cx) + .await + .log_err(); + }) + .detach(); + }) + .link( + "Learn More", + OpenBrowser { + url: zed_urls::edit_prediction_docs(cx), + } + .boxed_clone(), + ); menu }) @@ -1083,7 +1277,12 @@ async fn open_disabled_globs_setting_in_editor( }); if !edits.is_empty() { - item.edit(edits, cx); + item.edit( + edits + .into_iter() + .map(|(r, s)| (MultiBufferOffset(r.start)..MultiBufferOffset(r.end), s)), + cx, + ); } let text = item.buffer().read(cx).snapshot(cx).text(); @@ -1098,6 +1297,7 @@ async fn open_disabled_globs_setting_in_editor( .map(|inner_match| inner_match.start()..inner_match.end()) }); if let Some(range) = range { + let range = MultiBufferOffset(range.start)..MultiBufferOffset(range.end); item.change_selections( SelectionEffects::scroll(Autoscroll::newest()), window, @@ -1172,3 +1372,166 @@ fn toggle_edit_prediction_mode(fs: Arc, mode: EditPredictionsMode, cx: & }); } } + +fn render_zeta_tab_animation(cx: &App) -> impl IntoElement { + let tab = |n: u64, inverted: bool| { + let text_color = cx.theme().colors().text; + + h_flex().child( + h_flex() + .text_size(TextSize::XSmall.rems(cx)) + .text_color(text_color) + .child("tab") + .with_animation( + ElementId::Integer(n), + Animation::new(Duration::from_secs(3)).repeat(), + move |tab, delta| { + let n_f32 = n as f32; + + let offset = if inverted { + 0.2 * (4.0 - n_f32) + } else { + 0.2 * n_f32 + }; + + let phase = (delta - offset + 1.0) % 1.0; + let pulse = if phase < 0.6 { + let t = phase / 0.6; + 1.0 - (0.5 - t).abs() * 2.0 + } else { + 0.0 + }; + + let eased = ease_in_out(pulse); + let opacity = 0.1 + 0.5 * eased; + + tab.text_color(text_color.opacity(opacity)) + }, + ), + ) + }; + + let tab_sequence = |inverted: bool| { + h_flex() + .gap_1() + .child(tab(0, 
inverted)) + .child(tab(1, inverted)) + .child(tab(2, inverted)) + .child(tab(3, inverted)) + .child(tab(4, inverted)) + }; + + h_flex() + .my_1p5() + .p_4() + .justify_center() + .gap_2() + .rounded_xs() + .border_1() + .border_dashed() + .border_color(cx.theme().colors().border) + .bg(gpui::pattern_slash( + cx.theme().colors().border.opacity(0.5), + 1., + 8., + )) + .child(tab_sequence(true)) + .child(Icon::new(IconName::ZedPredict)) + .child(tab_sequence(false)) +} + +fn copilot_settings_url(enterprise_uri: Option<&str>) -> String { + match enterprise_uri { + Some(uri) => { + format!("{}{}", uri.trim_end_matches('/'), COPILOT_SETTINGS_PATH) + } + None => COPILOT_SETTINGS_URL.to_string(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + + #[gpui::test] + async fn test_copilot_settings_url_with_enterprise_uri(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + + cx.update_global(|settings_store: &mut SettingsStore, cx| { + settings_store + .set_user_settings( + r#"{"edit_predictions":{"copilot":{"enterprise_uri":"https://my-company.ghe.com"}}}"#, + cx, + ) + .unwrap(); + }); + + let url = cx.update(|cx| { + let all_language_settings = all_language_settings(None, cx); + copilot_settings_url( + all_language_settings + .edit_predictions + .copilot + .enterprise_uri + .as_deref(), + ) + }); + + assert_eq!(url, "https://my-company.ghe.com/settings/copilot"); + } + + #[gpui::test] + async fn test_copilot_settings_url_with_enterprise_uri_trailing_slash(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + + cx.update_global(|settings_store: &mut SettingsStore, cx| { + settings_store + .set_user_settings( + r#"{"edit_predictions":{"copilot":{"enterprise_uri":"https://my-company.ghe.com/"}}}"#, + cx, + ) + .unwrap(); + }); + + let url = cx.update(|cx| { + let all_language_settings = all_language_settings(None, cx); + copilot_settings_url( + all_language_settings + .edit_predictions + .copilot + .enterprise_uri + .as_deref(), + ) + }); + + assert_eq!(url, "https://my-company.ghe.com/settings/copilot"); + } + + #[gpui::test] + async fn test_copilot_settings_url_without_enterprise_uri(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + + let url = cx.update(|cx| { + let all_language_settings = all_language_settings(None, cx); + copilot_settings_url( + all_language_settings + .edit_predictions + .copilot + .enterprise_uri + .as_deref(), + ) + }); + + assert_eq!(url, "https://github.com/settings/copilot"); + } +} diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..92d66d2bec3a7a3b35678f1d4da92fae6b071633 --- /dev/null +++ b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -0,0 +1,370 @@ +use std::{ + any::TypeId, + collections::VecDeque, + ops::Add, + sync::Arc, + time::{Duration, Instant}, +}; + +use anyhow::Result; +use client::{Client, UserStore}; +use editor::{Editor, PathKey}; +use futures::StreamExt as _; +use gpui::{ + Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle, + Focusable, InteractiveElement as _, IntoElement as _, ParentElement as _, SharedString, + Styled as _, Task, TextAlign, Window, actions, div, 
pulsating_between, +}; +use multi_buffer::MultiBuffer; +use project::Project; +use text::Point; +use ui::{ + ButtonCommon, Clickable, Disableable, FluentBuilder as _, IconButton, IconName, + StyledTypography as _, h_flex, v_flex, +}; + +use edit_prediction::{ + ContextRetrievalFinishedDebugEvent, ContextRetrievalStartedDebugEvent, DebugEvent, + EditPredictionStore, +}; +use workspace::Item; + +pub struct EditPredictionContextView { + empty_focus_handle: FocusHandle, + project: Entity, + store: Entity, + runs: VecDeque, + current_ix: usize, + _update_task: Task>, +} + +#[derive(Debug)] +struct RetrievalRun { + editor: Entity, + started_at: Instant, + metadata: Vec<(&'static str, SharedString)>, + finished_at: Option, +} + +actions!( + dev, + [ + /// Go to the previous context retrieval run + EditPredictionContextGoBack, + /// Go to the next context retrieval run + EditPredictionContextGoForward + ] +); + +impl EditPredictionContextView { + pub fn new( + project: Entity, + client: &Arc, + user_store: &Entity, + window: &mut gpui::Window, + cx: &mut Context, + ) -> Self { + let store = EditPredictionStore::global(client, user_store, cx); + + let mut debug_rx = store.update(cx, |store, cx| store.debug_info(&project, cx)); + let _update_task = cx.spawn_in(window, async move |this, cx| { + while let Some(event) = debug_rx.next().await { + this.update_in(cx, |this, window, cx| { + this.handle_store_event(event, window, cx) + })?; + } + Ok(()) + }); + + Self { + empty_focus_handle: cx.focus_handle(), + project, + runs: VecDeque::new(), + current_ix: 0, + store, + _update_task, + } + } + + fn handle_store_event( + &mut self, + event: DebugEvent, + window: &mut gpui::Window, + cx: &mut Context, + ) { + match event { + DebugEvent::ContextRetrievalStarted(info) => { + if info.project_entity_id == self.project.entity_id() { + self.handle_context_retrieval_started(info, window, cx); + } + } + DebugEvent::ContextRetrievalFinished(info) => { + if info.project_entity_id == self.project.entity_id() { + self.handle_context_retrieval_finished(info, window, cx); + } + } + DebugEvent::EditPredictionStarted(_) => {} + DebugEvent::EditPredictionFinished(_) => {} + } + } + + fn handle_context_retrieval_started( + &mut self, + info: ContextRetrievalStartedDebugEvent, + window: &mut Window, + cx: &mut Context, + ) { + if self + .runs + .back() + .is_some_and(|run| run.finished_at.is_none()) + { + self.runs.pop_back(); + } + + let multibuffer = cx.new(|_| MultiBuffer::new(language::Capability::ReadOnly)); + let editor = cx + .new(|cx| Editor::for_multibuffer(multibuffer, Some(self.project.clone()), window, cx)); + + if self.runs.len() == 32 { + self.runs.pop_front(); + } + + self.runs.push_back(RetrievalRun { + editor, + started_at: info.timestamp, + finished_at: None, + metadata: Vec::new(), + }); + + cx.notify(); + } + + fn handle_context_retrieval_finished( + &mut self, + info: ContextRetrievalFinishedDebugEvent, + window: &mut Window, + cx: &mut Context, + ) { + let Some(run) = self.runs.back_mut() else { + return; + }; + + run.finished_at = Some(info.timestamp); + run.metadata = info.metadata; + + let related_files = self + .store + .read(cx) + .context_for_project_with_buffers(&self.project, cx) + .map_or(Vec::new(), |files| files.collect()); + + let editor = run.editor.clone(); + let multibuffer = run.editor.read(cx).buffer().clone(); + + if self.current_ix + 2 == self.runs.len() { + self.current_ix += 1; + } + + cx.spawn_in(window, async move |this, cx| { + let mut paths = Vec::new(); + for (related_file, 
buffer) in related_files { + let point_ranges = related_file + .excerpts + .iter() + .map(|excerpt| { + Point::new(excerpt.row_range.start, 0)..Point::new(excerpt.row_range.end, 0) + }) + .collect::>(); + cx.update(|_, cx| { + let path = PathKey::for_buffer(&buffer, cx); + paths.push((path, buffer, point_ranges)); + })?; + } + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.clear(cx); + + for (path, buffer, ranges) in paths { + multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + } + })?; + + editor.update_in(cx, |editor, window, cx| { + editor.move_to_beginning(&Default::default(), window, cx); + })?; + + this.update(cx, |_, cx| cx.notify()) + }) + .detach(); + } + + fn handle_go_back( + &mut self, + _: &EditPredictionContextGoBack, + window: &mut Window, + cx: &mut Context, + ) { + self.current_ix = self.current_ix.saturating_sub(1); + cx.focus_self(window); + cx.notify(); + } + + fn handle_go_forward( + &mut self, + _: &EditPredictionContextGoForward, + window: &mut Window, + cx: &mut Context, + ) { + self.current_ix = self + .current_ix + .add(1) + .min(self.runs.len().saturating_sub(1)); + cx.focus_self(window); + cx.notify(); + } + + fn render_informational_footer( + &self, + cx: &mut Context<'_, EditPredictionContextView>, + ) -> ui::Div { + let run = &self.runs[self.current_ix]; + let new_run_started = self + .runs + .back() + .map_or(false, |latest_run| latest_run.finished_at.is_none()); + + h_flex() + .p_2() + .w_full() + .font_buffer(cx) + .text_xs() + .border_t_1() + .gap_2() + .child(v_flex().h_full().flex_1().child({ + let t0 = run.started_at; + let mut table = ui::Table::<2>::new().width(ui::px(300.)).no_ui_font(); + for (key, value) in &run.metadata { + table = table.row([key.into_any_element(), value.clone().into_any_element()]) + } + table = table.row([ + "Total Time".into_any_element(), + format!("{} ms", (run.finished_at.unwrap_or(t0) - t0).as_millis()) + .into_any_element(), + ]); + table + })) + .child( + v_flex().h_full().text_align(TextAlign::Right).child( + h_flex() + .justify_end() + .child( + IconButton::new("go-back", IconName::ChevronLeft) + .disabled(self.current_ix == 0 || self.runs.len() < 2) + .tooltip(ui::Tooltip::for_action_title( + "Go to previous run", + &EditPredictionContextGoBack, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.handle_go_back(&EditPredictionContextGoBack, window, cx); + })), + ) + .child( + div() + .child(format!("{}/{}", self.current_ix + 1, self.runs.len())) + .map(|this| { + if new_run_started { + this.with_animation( + "pulsating-count", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.opacity(delta), + ) + .into_any_element() + } else { + this.into_any_element() + } + }), + ) + .child( + IconButton::new("go-forward", IconName::ChevronRight) + .disabled(self.current_ix + 1 == self.runs.len()) + .tooltip(ui::Tooltip::for_action_title( + "Go to next run", + &EditPredictionContextGoBack, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.handle_go_forward( + &EditPredictionContextGoForward, + window, + cx, + ); + })), + ), + ), + ) + } +} + +impl Focusable for EditPredictionContextView { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.runs + .get(self.current_ix) + .map(|run| run.editor.read(cx).focus_handle(cx)) + .unwrap_or_else(|| self.empty_focus_handle.clone()) + } +} + +impl EventEmitter<()> for EditPredictionContextView {} + +impl Item for EditPredictionContextView { + type Event = (); + + fn 
tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + "Edit Prediction Context".into() + } + + fn buffer_kind(&self, _cx: &App) -> workspace::item::ItemBufferKind { + workspace::item::ItemBufferKind::Multibuffer + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.clone().into()) + } else if type_id == TypeId::of::() { + Some(self.runs.get(self.current_ix)?.editor.clone().into()) + } else { + None + } + } +} + +impl gpui::Render for EditPredictionContextView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { + v_flex() + .key_context("EditPredictionContext") + .on_action(cx.listener(Self::handle_go_back)) + .on_action(cx.listener(Self::handle_go_forward)) + .size_full() + .map(|this| { + if self.runs.is_empty() { + this.child( + v_flex() + .size_full() + .justify_center() + .items_center() + .child("No retrieval runs yet"), + ) + } else { + this.child(self.runs[self.current_ix].editor.clone()) + .child(self.render_informational_footer(cx)) + } + }) + } +} diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs new file mode 100644 index 0000000000000000000000000000000000000000..a762fd22aa7c32779a096fa97b2ea20ef3c9b744 --- /dev/null +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -0,0 +1,330 @@ +mod edit_prediction_button; +mod edit_prediction_context_view; +mod rate_prediction_modal; + +use std::any::{Any as _, TypeId}; +use std::path::Path; +use std::sync::Arc; + +use command_palette_hooks::CommandPaletteFilter; +use edit_prediction::{ + EditPredictionStore, ResetOnboarding, Zeta2FeatureFlag, example_spec::ExampleSpec, +}; +use edit_prediction_context_view::EditPredictionContextView; +use editor::Editor; +use feature_flags::FeatureFlagAppExt as _; +use git::repository::DiffType; +use gpui::{Window, actions}; +use language::ToPoint as _; +use log; +use project::DisableAiSettings; +use rate_prediction_modal::RatePredictionsModal; +use settings::{Settings as _, SettingsStore}; +use text::ToOffset as _; +use ui::{App, prelude::*}; +use workspace::{SplitDirection, Workspace}; + +pub use edit_prediction_button::{EditPredictionButton, ToggleMenu}; + +use crate::rate_prediction_modal::PredictEditsRatePredictionsFeatureFlag; + +actions!( + dev, + [ + /// Opens the edit prediction context view. + OpenEditPredictionContextView, + ] +); + +actions!( + edit_prediction, + [ + /// Opens the rate completions modal. + RatePredictions, + /// Captures an ExampleSpec from the current editing session and opens it as Markdown. 
+ CaptureExample, + ] +); + +pub fn init(cx: &mut App) { + feature_gate_predict_edits_actions(cx); + + cx.observe_new(move |workspace: &mut Workspace, _, _cx| { + workspace.register_action(|workspace, _: &RatePredictions, window, cx| { + if cx.has_flag::() { + RatePredictionsModal::toggle(workspace, window, cx); + } + }); + + workspace.register_action(capture_edit_prediction_example); + workspace.register_action_renderer(|div, _, _, cx| { + let has_flag = cx.has_flag::(); + div.when(has_flag, |div| { + div.on_action(cx.listener( + move |workspace, _: &OpenEditPredictionContextView, window, cx| { + let project = workspace.project(); + workspace.split_item( + SplitDirection::Right, + Box::new(cx.new(|cx| { + EditPredictionContextView::new( + project.clone(), + workspace.client(), + workspace.user_store(), + window, + cx, + ) + })), + window, + cx, + ); + }, + )) + }) + }); + }) + .detach(); +} + +fn feature_gate_predict_edits_actions(cx: &mut App) { + let rate_completion_action_types = [TypeId::of::()]; + let reset_onboarding_action_types = [TypeId::of::()]; + let all_action_types = [ + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + zed_actions::OpenZedPredictOnboarding.type_id(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + TypeId::of::(), + ]; + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_action_types(&rate_completion_action_types); + filter.hide_action_types(&reset_onboarding_action_types); + filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]); + }); + + cx.observe_global::(move |cx| { + let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let has_feature_flag = cx.has_flag::(); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + if is_ai_disabled { + filter.hide_action_types(&all_action_types); + } else if has_feature_flag { + filter.show_action_types(&rate_completion_action_types); + } else { + filter.hide_action_types(&rate_completion_action_types); + } + }); + }) + .detach(); + + cx.observe_flag::(move |is_enabled, cx| { + if !DisableAiSettings::get_global(cx).disable_ai { + if is_enabled { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.show_action_types(&rate_completion_action_types); + }); + } else { + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_action_types(&rate_completion_action_types); + }); + } + } + }) + .detach(); +} + +fn capture_edit_prediction_example( + workspace: &mut Workspace, + _: &CaptureExample, + window: &mut Window, + cx: &mut Context, +) { + let Some(ep_store) = EditPredictionStore::try_global(cx) else { + return; + }; + + let project = workspace.project().clone(); + + let (worktree_root, repository) = { + let project_ref = project.read(cx); + let worktree_root = project_ref + .visible_worktrees(cx) + .next() + .map(|worktree| worktree.read(cx).abs_path()); + let repository = project_ref.active_repository(cx); + (worktree_root, repository) + }; + + let (Some(worktree_root), Some(repository)) = (worktree_root, repository) else { + log::error!("CaptureExampleSpec: missing worktree or active repository"); + return; + }; + + let repository_snapshot = repository.read(cx).snapshot(); + if worktree_root.as_ref() != repository_snapshot.work_directory_abs_path.as_ref() { + log::error!( + "repository is not at worktree root (repo={:?}, worktree={:?})", + repository_snapshot.work_directory_abs_path, + worktree_root + ); + return; + } + + let Some(repository_url) = repository_snapshot + .remote_origin_url + .clone() 
+ .or_else(|| repository_snapshot.remote_upstream_url.clone()) + else { + log::error!("active repository has no origin/upstream remote url"); + return; + }; + + let Some(revision) = repository_snapshot + .head_commit + .as_ref() + .map(|commit| commit.sha.to_string()) + else { + log::error!("active repository has no head commit"); + return; + }; + + let mut events = ep_store.update(cx, |store, cx| { + store.edit_history_for_project_with_pause_split_last_event(&project, cx) + }); + + let Some(editor) = workspace.active_item_as::(cx) else { + log::error!("no active editor"); + return; + }; + + let Some(project_path) = editor.read(cx).project_path(cx) else { + log::error!("active editor has no project path"); + return; + }; + + let Some((buffer, cursor_anchor)) = editor + .read(cx) + .buffer() + .read(cx) + .text_anchor_for_position(editor.read(cx).selections.newest_anchor().head(), cx) + else { + log::error!("failed to resolve cursor buffer/anchor"); + return; + }; + + let snapshot = buffer.read(cx).snapshot(); + let cursor_point = cursor_anchor.to_point(&snapshot); + let (_editable_range, context_range) = + edit_prediction::cursor_excerpt::editable_and_context_ranges_for_cursor_position( + cursor_point, + &snapshot, + 100, + 50, + ); + + let cursor_path: Arc = repository + .read(cx) + .project_path_to_repo_path(&project_path, cx) + .map(|repo_path| Path::new(repo_path.as_unix_str()).into()) + .unwrap_or_else(|| Path::new(project_path.path.as_unix_str()).into()); + + let cursor_position = { + let context_start_offset = context_range.start.to_offset(&snapshot); + let cursor_offset = cursor_anchor.to_offset(&snapshot); + let cursor_offset_in_excerpt = cursor_offset.saturating_sub(context_start_offset); + let mut excerpt = snapshot.text_for_range(context_range).collect::(); + if cursor_offset_in_excerpt <= excerpt.len() { + excerpt.insert_str(cursor_offset_in_excerpt, zeta_prompt::CURSOR_MARKER); + } + excerpt + }; + + let markdown_language = workspace + .app_state() + .languages + .language_for_name("Markdown"); + + cx.spawn_in(window, async move |workspace_entity, cx| { + let markdown_language = markdown_language.await?; + + let uncommitted_diff_rx = repository.update(cx, |repository, cx| { + repository.diff(DiffType::HeadToWorktree, cx) + })?; + + let uncommitted_diff = match uncommitted_diff_rx.await { + Ok(Ok(diff)) => diff, + Ok(Err(error)) => { + log::error!("failed to compute uncommitted diff: {error:#}"); + return Ok(()); + } + Err(error) => { + log::error!("uncommitted diff channel dropped: {error:#}"); + return Ok(()); + } + }; + + let mut edit_history = String::new(); + let mut expected_patch = String::new(); + if let Some(last_event) = events.pop() { + for event in &events { + zeta_prompt::write_event(&mut edit_history, event); + if !edit_history.ends_with('\n') { + edit_history.push('\n'); + } + edit_history.push('\n'); + } + + zeta_prompt::write_event(&mut expected_patch, &last_event); + } + + let format = + time::format_description::parse("[year]-[month]-[day] [hour]:[minute]:[second]"); + let name = match format { + Ok(format) => { + let now = time::OffsetDateTime::now_local() + .unwrap_or_else(|_| time::OffsetDateTime::now_utc()); + now.format(&format) + .unwrap_or_else(|_| "unknown-time".to_string()) + } + Err(_) => "unknown-time".to_string(), + }; + + let markdown = ExampleSpec { + name, + repository_url, + revision, + uncommitted_diff, + cursor_path, + cursor_position, + edit_history, + expected_patch, + } + .to_markdown(); + + let buffer = project + .update(cx, |project, 
cx| project.create_buffer(false, cx))? + .await?; + buffer.update(cx, |buffer, cx| { + buffer.set_text(markdown, cx); + buffer.set_language(Some(markdown_language), cx); + })?; + + workspace_entity.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane( + Box::new( + cx.new(|cx| Editor::for_buffer(buffer, Some(project.clone()), window, cx)), + ), + None, + true, + window, + cx, + ); + }) + }) + .detach_and_log_err(cx); +} diff --git a/crates/edit_prediction_ui/src/rate_prediction_modal.rs b/crates/edit_prediction_ui/src/rate_prediction_modal.rs new file mode 100644 index 0000000000000000000000000000000000000000..1af65ad58083e3cccfa51ea7b674da01cad810a0 --- /dev/null +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs @@ -0,0 +1,905 @@ +use buffer_diff::{BufferDiff, BufferDiffSnapshot}; +use edit_prediction::{EditPrediction, EditPredictionRating, EditPredictionStore}; +use editor::{Editor, ExcerptRange, MultiBuffer}; +use feature_flags::FeatureFlag; +use gpui::{ + App, BorderStyle, DismissEvent, EdgesRefinement, Entity, EventEmitter, FocusHandle, Focusable, + Length, StyleRefinement, TextStyleRefinement, Window, actions, prelude::*, +}; +use language::{LanguageRegistry, Point, language_settings}; +use markdown::{Markdown, MarkdownStyle}; +use settings::Settings as _; +use std::{fmt::Write, sync::Arc, time::Duration}; +use theme::ThemeSettings; +use ui::{KeyBinding, List, ListItem, ListItemSpacing, Tooltip, prelude::*}; +use workspace::{ModalView, Workspace}; + +actions!( + zeta, + [ + /// Rates the active completion with a thumbs up. + ThumbsUpActivePrediction, + /// Rates the active completion with a thumbs down. + ThumbsDownActivePrediction, + /// Navigates to the next edit in the completion history. + NextEdit, + /// Navigates to the previous edit in the completion history. + PreviousEdit, + /// Focuses on the completions list. + FocusPredictions, + /// Previews the selected completion. 
+ PreviewPrediction, + ] +); + +pub struct PredictEditsRatePredictionsFeatureFlag; + +impl FeatureFlag for PredictEditsRatePredictionsFeatureFlag { + const NAME: &'static str = "predict-edits-rate-completions"; +} + +pub struct RatePredictionsModal { + ep_store: Entity, + language_registry: Arc, + active_prediction: Option, + selected_index: usize, + diff_editor: Entity, + focus_handle: FocusHandle, + _subscription: gpui::Subscription, + current_view: RatePredictionView, +} + +struct ActivePrediction { + prediction: EditPrediction, + feedback_editor: Entity, + formatted_inputs: Entity, +} + +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] +enum RatePredictionView { + SuggestedEdits, + RawInput, +} + +impl RatePredictionView { + pub fn name(&self) -> &'static str { + match self { + Self::SuggestedEdits => "Suggested Edits", + Self::RawInput => "Recorded Events & Input", + } + } +} + +impl RatePredictionsModal { + pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context) { + if let Some(ep_store) = EditPredictionStore::try_global(cx) { + let language_registry = workspace.app_state().languages.clone(); + workspace.toggle_modal(window, cx, |window, cx| { + RatePredictionsModal::new(ep_store, language_registry, window, cx) + }); + + telemetry::event!("Rate Prediction Modal Open", source = "Edit Prediction"); + } + } + + pub fn new( + ep_store: Entity, + language_registry: Arc, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let subscription = cx.observe(&ep_store, |_, _, cx| cx.notify()); + + Self { + ep_store, + language_registry, + selected_index: 0, + focus_handle: cx.focus_handle(), + active_prediction: None, + _subscription: subscription, + diff_editor: cx.new(|cx| { + let multibuffer = cx.new(|_| MultiBuffer::new(language::Capability::ReadOnly)); + let mut editor = Editor::for_multibuffer(multibuffer, None, window, cx); + editor.disable_inline_diagnostics(); + editor.set_expand_all_diff_hunks(cx); + editor.set_show_git_diff_gutter(false, cx); + editor + }), + current_view: RatePredictionView::SuggestedEdits, + } + } + + fn dismiss(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { + cx.emit(DismissEvent); + } + + fn select_next(&mut self, _: &menu::SelectNext, _: &mut Window, cx: &mut Context) { + self.selected_index += 1; + self.selected_index = usize::min( + self.selected_index, + self.ep_store.read(cx).shown_predictions().count(), + ); + cx.notify(); + } + + fn select_previous( + &mut self, + _: &menu::SelectPrevious, + _: &mut Window, + cx: &mut Context, + ) { + self.selected_index = self.selected_index.saturating_sub(1); + cx.notify(); + } + + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { + let next_index = self + .ep_store + .read(cx) + .shown_predictions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item + .find(|(_, completion)| !completion.edits.is_empty()) + .map(|(ix, _)| ix + self.selected_index); + + if let Some(next_index) = next_index { + self.selected_index = next_index; + cx.notify(); + } + } + + fn select_prev_edit(&mut self, _: &PreviousEdit, _: &mut Window, cx: &mut Context) { + let ep_store = self.ep_store.read(cx); + let completions_len = ep_store.shown_completions_len(); + + let prev_index = self + .ep_store + .read(cx) + .shown_predictions() + .rev() + .skip((completions_len - 1) - self.selected_index) + .enumerate() + .skip(1) // Skip straight to the previous item + .find(|(_, completion)| !completion.edits.is_empty()) + .map(|(ix, _)| 
self.selected_index - ix); + + if let Some(prev_index) = prev_index { + self.selected_index = prev_index; + cx.notify(); + } + cx.notify(); + } + + fn select_first(&mut self, _: &menu::SelectFirst, _: &mut Window, cx: &mut Context) { + self.selected_index = 0; + cx.notify(); + } + + fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { + self.selected_index = self.ep_store.read(cx).shown_completions_len() - 1; + cx.notify(); + } + + pub fn thumbs_up_active( + &mut self, + _: &ThumbsUpActivePrediction, + window: &mut Window, + cx: &mut Context, + ) { + self.ep_store.update(cx, |ep_store, cx| { + if let Some(active) = &self.active_prediction { + ep_store.rate_prediction( + &active.prediction, + EditPredictionRating::Positive, + active.feedback_editor.read(cx).text(cx), + cx, + ); + } + }); + + let current_completion = self + .active_prediction + .as_ref() + .map(|completion| completion.prediction.clone()); + self.select_completion(current_completion, false, window, cx); + self.select_next_edit(&Default::default(), window, cx); + self.confirm(&Default::default(), window, cx); + + cx.notify(); + } + + pub fn thumbs_down_active( + &mut self, + _: &ThumbsDownActivePrediction, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(active) = &self.active_prediction { + if active.feedback_editor.read(cx).text(cx).is_empty() { + return; + } + + self.ep_store.update(cx, |ep_store, cx| { + ep_store.rate_prediction( + &active.prediction, + EditPredictionRating::Negative, + active.feedback_editor.read(cx).text(cx), + cx, + ); + }); + } + + let current_completion = self + .active_prediction + .as_ref() + .map(|completion| completion.prediction.clone()); + self.select_completion(current_completion, false, window, cx); + self.select_next_edit(&Default::default(), window, cx); + self.confirm(&Default::default(), window, cx); + + cx.notify(); + } + + fn focus_completions( + &mut self, + _: &FocusPredictions, + window: &mut Window, + cx: &mut Context, + ) { + cx.focus_self(window); + cx.notify(); + } + + fn preview_completion( + &mut self, + _: &PreviewPrediction, + window: &mut Window, + cx: &mut Context, + ) { + let completion = self + .ep_store + .read(cx) + .shown_predictions() + .skip(self.selected_index) + .take(1) + .next() + .cloned(); + + self.select_completion(completion, false, window, cx); + } + + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let completion = self + .ep_store + .read(cx) + .shown_predictions() + .skip(self.selected_index) + .take(1) + .next() + .cloned(); + + self.select_completion(completion, true, window, cx); + } + + pub fn select_completion( + &mut self, + prediction: Option, + focus: bool, + window: &mut Window, + cx: &mut Context, + ) { + // Avoid resetting completion rating if it's already selected. 
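
The next/previous-edit navigation above scans the shown predictions with `skip`/`enumerate`/`find` to land on the nearest entry that actually has edits. Below is a minimal, self-contained sketch of that pattern over a plain slice; the names are illustrative, not from this change, and, like the original `select_prev_edit`, the previous-direction helper assumes the list is non-empty.

```rust
/// Find the next index after `selected` whose entry has edits.
fn next_with_edits(has_edits: &[bool], selected: usize) -> Option<usize> {
    has_edits
        .iter()
        .skip(selected)
        .enumerate()
        .skip(1) // skip the currently selected entry itself
        .find(|(_, has)| **has)
        .map(|(ix, _)| ix + selected)
}

/// Find the nearest index before `selected` whose entry has edits.
fn prev_with_edits(has_edits: &[bool], selected: usize) -> Option<usize> {
    has_edits
        .iter()
        .rev()
        .skip((has_edits.len() - 1) - selected)
        .enumerate()
        .skip(1) // skip the currently selected entry itself
        .find(|(_, has)| **has)
        .map(|(ix, _)| selected - ix)
}

fn main() {
    let has_edits = [true, false, true, false, true];
    assert_eq!(next_with_edits(&has_edits, 0), Some(2));
    assert_eq!(next_with_edits(&has_edits, 4), None);
    assert_eq!(prev_with_edits(&has_edits, 2), Some(0));
    assert_eq!(prev_with_edits(&has_edits, 0), None);
    println!("navigation sketch ok");
}
```
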
+ if let Some(prediction) = prediction { + self.selected_index = self + .ep_store + .read(cx) + .shown_predictions() + .enumerate() + .find(|(_, completion_b)| prediction.id == completion_b.id) + .map(|(ix, _)| ix) + .unwrap_or(self.selected_index); + cx.notify(); + + if let Some(prev_prediction) = self.active_prediction.as_ref() + && prediction.id == prev_prediction.prediction.id + { + if focus { + window.focus(&prev_prediction.feedback_editor.focus_handle(cx), cx); + } + return; + } + + self.diff_editor.update(cx, |editor, cx| { + let new_buffer = prediction.edit_preview.build_result_buffer(cx); + let new_buffer_snapshot = new_buffer.read(cx).snapshot(); + let old_buffer_snapshot = prediction.snapshot.clone(); + let new_buffer_id = new_buffer_snapshot.remote_id(); + + let range = prediction + .edit_preview + .compute_visible_range(&prediction.edits) + .unwrap_or(Point::zero()..Point::zero()); + let start = Point::new(range.start.row.saturating_sub(5), 0); + let end = Point::new(range.end.row + 5, 0).min(new_buffer_snapshot.max_point()); + + let diff = cx.new::(|cx| { + let diff_snapshot = BufferDiffSnapshot::new_with_base_buffer( + new_buffer_snapshot.text.clone(), + Some(old_buffer_snapshot.text().into()), + old_buffer_snapshot.clone(), + cx, + ); + let diff = BufferDiff::new(&new_buffer_snapshot, cx); + cx.spawn(async move |diff, cx| { + let diff_snapshot = diff_snapshot.await; + diff.update(cx, |diff, cx| { + diff.set_snapshot(diff_snapshot, &new_buffer_snapshot.text, cx); + }) + }) + .detach(); + diff + }); + + editor.disable_header_for_buffer(new_buffer_id, cx); + editor.buffer().update(cx, |multibuffer, cx| { + multibuffer.clear(cx); + multibuffer.push_excerpts( + new_buffer, + vec![ExcerptRange { + context: start..end, + primary: start..end, + }], + cx, + ); + multibuffer.add_diff(diff, cx); + }); + }); + + let mut formatted_inputs = String::new(); + + write!(&mut formatted_inputs, "## Events\n\n").unwrap(); + + for event in &prediction.inputs.events { + formatted_inputs.push_str("```diff\n"); + zeta_prompt::write_event(&mut formatted_inputs, event.as_ref()); + formatted_inputs.push_str("```\n\n"); + } + + write!(&mut formatted_inputs, "## Related files\n\n").unwrap(); + + for included_file in prediction.inputs.related_files.as_ref() { + write!( + &mut formatted_inputs, + "### {}\n\n", + included_file.path.display() + ) + .unwrap(); + + for excerpt in included_file.excerpts.iter() { + write!( + &mut formatted_inputs, + "```{}\n{}\n```\n", + included_file.path.display(), + excerpt.text + ) + .unwrap(); + } + } + + write!(&mut formatted_inputs, "## Cursor Excerpt\n\n").unwrap(); + + writeln!( + &mut formatted_inputs, + "```{}\n{}{}\n```\n", + prediction.inputs.cursor_path.display(), + &prediction.inputs.cursor_excerpt[..prediction.inputs.cursor_offset_in_excerpt], + &prediction.inputs.cursor_excerpt[prediction.inputs.cursor_offset_in_excerpt..], + ) + .unwrap(); + + self.active_prediction = Some(ActivePrediction { + prediction, + feedback_editor: cx.new(|cx| { + let mut editor = Editor::multi_line(window, cx); + editor.disable_scrollbars_and_minimap(window, cx); + editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); + editor.set_show_line_numbers(false, cx); + editor.set_show_git_diff_gutter(false, cx); + editor.set_show_code_actions(false, cx); + editor.set_show_runnables(false, cx); + editor.set_show_breakpoints(false, cx); + editor.set_show_wrap_guides(false, cx); + editor.set_show_indent_guides(false, cx); + editor.set_show_edit_predictions(Some(false), 
window, cx); + editor.set_placeholder_text("Add your feedback…", window, cx); + if focus { + cx.focus_self(window); + } + editor + }), + formatted_inputs: cx.new(|cx| { + Markdown::new( + formatted_inputs.into(), + Some(self.language_registry.clone()), + None, + cx, + ) + }), + }); + } else { + self.active_prediction = None; + } + + cx.notify(); + } + + fn render_view_nav(&self, cx: &Context) -> impl IntoElement { + h_flex() + .h_8() + .px_1() + .border_b_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .gap_1() + .child( + Button::new( + ElementId::Name("suggested-edits".into()), + RatePredictionView::SuggestedEdits.name(), + ) + .label_size(LabelSize::Small) + .on_click(cx.listener(move |this, _, _window, cx| { + this.current_view = RatePredictionView::SuggestedEdits; + cx.notify(); + })) + .toggle_state(self.current_view == RatePredictionView::SuggestedEdits), + ) + .child( + Button::new( + ElementId::Name("raw-input".into()), + RatePredictionView::RawInput.name(), + ) + .label_size(LabelSize::Small) + .on_click(cx.listener(move |this, _, _window, cx| { + this.current_view = RatePredictionView::RawInput; + cx.notify(); + })) + .toggle_state(self.current_view == RatePredictionView::RawInput), + ) + } + + fn render_suggested_edits(&self, cx: &mut Context) -> Option> { + let bg_color = cx.theme().colors().editor_background; + Some( + div() + .id("diff") + .p_4() + .size_full() + .bg(bg_color) + .overflow_scroll() + .whitespace_nowrap() + .child(self.diff_editor.clone()), + ) + } + + fn render_raw_input( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option> { + let theme_settings = ThemeSettings::get_global(cx); + let buffer_font_size = theme_settings.buffer_font_size(cx); + + Some( + v_flex() + .size_full() + .overflow_hidden() + .relative() + .child( + div() + .id("raw-input") + .py_4() + .px_6() + .size_full() + .bg(cx.theme().colors().editor_background) + .overflow_scroll() + .child(if let Some(active_prediction) = &self.active_prediction { + markdown::MarkdownElement::new( + active_prediction.formatted_inputs.clone(), + MarkdownStyle { + base_text_style: window.text_style(), + syntax: cx.theme().syntax().clone(), + code_block: StyleRefinement { + text: TextStyleRefinement { + font_family: Some( + theme_settings.buffer_font.family.clone(), + ), + font_size: Some(buffer_font_size.into()), + ..Default::default() + }, + padding: EdgesRefinement { + top: Some(DefiniteLength::Absolute( + AbsoluteLength::Pixels(px(8.)), + )), + left: Some(DefiniteLength::Absolute( + AbsoluteLength::Pixels(px(8.)), + )), + right: Some(DefiniteLength::Absolute( + AbsoluteLength::Pixels(px(8.)), + )), + bottom: Some(DefiniteLength::Absolute( + AbsoluteLength::Pixels(px(8.)), + )), + }, + margin: EdgesRefinement { + top: Some(Length::Definite(px(8.).into())), + left: Some(Length::Definite(px(0.).into())), + right: Some(Length::Definite(px(0.).into())), + bottom: Some(Length::Definite(px(12.).into())), + }, + border_style: Some(BorderStyle::Solid), + border_widths: EdgesRefinement { + top: Some(AbsoluteLength::Pixels(px(1.))), + left: Some(AbsoluteLength::Pixels(px(1.))), + right: Some(AbsoluteLength::Pixels(px(1.))), + bottom: Some(AbsoluteLength::Pixels(px(1.))), + }, + border_color: Some(cx.theme().colors().border_variant), + background: Some( + cx.theme().colors().editor_background.into(), + ), + ..Default::default() + }, + ..Default::default() + }, + ) + .into_any_element() + } else { + div() + .child("No active completion".to_string()) + 
.into_any_element() + }), + ) + .id("raw-input-view"), + ) + } + + fn render_active_completion( + &mut self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let active_prediction = self.active_prediction.as_ref()?; + let completion_id = active_prediction.prediction.id.clone(); + let focus_handle = &self.focus_handle(cx); + + let border_color = cx.theme().colors().border; + let bg_color = cx.theme().colors().editor_background; + + let rated = self.ep_store.read(cx).is_prediction_rated(&completion_id); + let feedback_empty = active_prediction + .feedback_editor + .read(cx) + .text(cx) + .is_empty(); + + let label_container = h_flex().pl_1().gap_1p5(); + + Some( + v_flex() + .size_full() + .overflow_hidden() + .relative() + .child( + v_flex() + .size_full() + .overflow_hidden() + .relative() + .child(self.render_view_nav(cx)) + .when_some( + match self.current_view { + RatePredictionView::SuggestedEdits => { + self.render_suggested_edits(cx) + } + RatePredictionView::RawInput => self.render_raw_input(window, cx), + }, + |this, element| this.child(element), + ), + ) + .when(!rated, |this| { + this.child( + h_flex() + .p_2() + .gap_2() + .border_y_1() + .border_color(border_color) + .child( + Icon::new(IconName::Info) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + div().w_full().pr_2().flex_wrap().child( + Label::new(concat!( + "Explain why this completion is good or bad. ", + "If it's negative, describe what you expected instead." + )) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ), + ) + }) + .when(!rated, |this| { + this.child( + div() + .h_40() + .pt_1() + .bg(bg_color) + .child(active_prediction.feedback_editor.clone()), + ) + }) + .child( + h_flex() + .p_1() + .h_8() + .max_h_8() + .border_t_1() + .border_color(border_color) + .max_w_full() + .justify_between() + .children(if rated { + Some( + label_container + .child( + Icon::new(IconName::Check) + .size(IconSize::Small) + .color(Color::Success), + ) + .child(Label::new("Rated completion.").color(Color::Muted)), + ) + } else if active_prediction.prediction.edits.is_empty() { + Some( + label_container + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child(Label::new("No edits produced.").color(Color::Muted)), + ) + } else { + Some(label_container) + }) + .child( + h_flex() + .gap_1() + .child( + Button::new("bad", "Bad Prediction") + .icon(IconName::ThumbsDown) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .disabled(rated || feedback_empty) + .when(feedback_empty, |this| { + this.tooltip(Tooltip::text( + "Explain what's bad about it before reporting it", + )) + }) + .key_binding(KeyBinding::for_action_in( + &ThumbsDownActivePrediction, + focus_handle, + cx, + )) + .on_click(cx.listener(move |this, _, window, cx| { + if this.active_prediction.is_some() { + this.thumbs_down_active( + &ThumbsDownActivePrediction, + window, + cx, + ); + } + })), + ) + .child( + Button::new("good", "Good Prediction") + .icon(IconName::ThumbsUp) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .disabled(rated) + .key_binding(KeyBinding::for_action_in( + &ThumbsUpActivePrediction, + focus_handle, + cx, + )) + .on_click(cx.listener(move |this, _, window, cx| { + if this.active_prediction.is_some() { + this.thumbs_up_active( + &ThumbsUpActivePrediction, + window, + cx, + ); + } + })), + ), + ), + ), + ) + } + + fn render_shown_completions(&self, cx: &Context) -> impl Iterator { + self.ep_store + .read(cx) + .shown_predictions() 
+ .cloned() + .enumerate() + .map(|(index, completion)| { + let selected = self + .active_prediction + .as_ref() + .is_some_and(|selected| selected.prediction.id == completion.id); + let rated = self.ep_store.read(cx).is_prediction_rated(&completion.id); + + let (icon_name, icon_color, tooltip_text) = + match (rated, completion.edits.is_empty()) { + (true, _) => (IconName::Check, Color::Success, "Rated Prediction"), + (false, true) => (IconName::File, Color::Muted, "No Edits Produced"), + (false, false) => (IconName::FileDiff, Color::Accent, "Edits Available"), + }; + + let file = completion.buffer.read(cx).file(); + let file_name = file + .as_ref() + .map_or(SharedString::new_static("untitled"), |file| { + file.file_name(cx).to_string().into() + }); + let file_path = file.map(|file| file.path().as_unix_str().to_string()); + + ListItem::new(completion.id.clone()) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .focused(index == self.selected_index) + .toggle_state(selected) + .child( + h_flex() + .id("completion-content") + .gap_3() + .child(Icon::new(icon_name).color(icon_color).size(IconSize::Small)) + .child( + v_flex() + .child( + h_flex() + .gap_1() + .child(Label::new(file_name).size(LabelSize::Small)) + .when_some(file_path, |this, p| { + this.child( + Label::new(p) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), + ) + .child( + Label::new(format!( + "{} ago, {:.2?}", + format_time_ago( + completion.response_received_at.elapsed() + ), + completion.latency() + )) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ), + ) + .tooltip(Tooltip::text(tooltip_text)) + .on_click(cx.listener(move |this, _, window, cx| { + this.select_completion(Some(completion.clone()), true, window, cx); + })) + }) + } +} + +impl Render for RatePredictionsModal { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let border_color = cx.theme().colors().border; + + h_flex() + .key_context("RatePredictionModal") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::dismiss)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_prev_edit)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_next_edit)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::thumbs_up_active)) + .on_action(cx.listener(Self::thumbs_down_active)) + .on_action(cx.listener(Self::focus_completions)) + .on_action(cx.listener(Self::preview_completion)) + .bg(cx.theme().colors().elevated_surface_background) + .border_1() + .border_color(border_color) + .w(window.viewport_size().width - px(320.)) + .h(window.viewport_size().height - px(300.)) + .rounded_lg() + .shadow_lg() + .child( + v_flex() + .w_72() + .h_full() + .border_r_1() + .border_color(border_color) + .flex_shrink_0() + .overflow_hidden() + .child( + h_flex() + .h_8() + .px_2() + .justify_between() + .border_b_1() + .border_color(border_color) + .child(Icon::new(IconName::ZedPredict).size(IconSize::Small)) + .child( + Label::new("From most recent to oldest") + .color(Color::Muted) + .size(LabelSize::Small), + ), + ) + .child( + div() + .id("completion_list") + .p_0p5() + .h_full() + .overflow_y_scroll() + .child( + List::new() + .empty_message( + div() + .p_2() + .child( + Label::new(concat!( + "No completions yet. ", + "Use the editor to generate some, ", + "and make sure to rate them!" 
+ )) + .color(Color::Muted), + ) + .into_any_element(), + ) + .children(self.render_shown_completions(cx)), + ), + ), + ) + .children(self.render_active_completion(window, cx)) + .on_mouse_down_out(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))) + } +} + +impl EventEmitter for RatePredictionsModal {} + +impl Focusable for RatePredictionsModal { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl ModalView for RatePredictionsModal {} + +fn format_time_ago(elapsed: Duration) -> String { + let seconds = elapsed.as_secs(); + if seconds < 120 { + "1 minute".to_string() + } else if seconds < 3600 { + format!("{} minutes", seconds / 60) + } else if seconds < 7200 { + "1 hour".to_string() + } else if seconds < 86400 { + format!("{} hours", seconds / 3600) + } else if seconds < 172800 { + "1 day".to_string() + } else { + format!("{} days", seconds / 86400) + } +} diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 62226f5dec2aa88f0ccdb6ad59935f6bdfe6536e..f3ed28ab05c6839a478ebbf6c81ca5e66fc372e3 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -41,6 +41,7 @@ dap.workspace = true db.workspace = true buffer_diff.workspace = true emojis.workspace = true +feature_flags.workspace = true file_icons.workspace = true futures.workspace = true fuzzy.workspace = true @@ -48,7 +49,7 @@ fs.workspace = true git.workspace = true gpui.workspace = true indoc.workspace = true -edit_prediction.workspace = true +edit_prediction_types.workspace = true itertools.workspace = true language.workspace = true linkify.workspace = true @@ -83,6 +84,8 @@ tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } tree-sitter-python = { workspace = true, optional = true } +ztracing.workspace = true +tracing.workspace = true unicode-segmentation.workspace = true unicode-script.workspace = true unindent = { workspace = true, optional = true } @@ -93,6 +96,7 @@ uuid.workspace = true vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true +zlog.workspace = true [dev-dependencies] criterion.workspace = true @@ -106,6 +110,7 @@ multi_buffer = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } release_channel.workspace = true rand.workspace = true +semver.workspace = true settings = { workspace = true, features = ["test-support"] } tempfile.workspace = true text = { workspace = true, features = ["test-support"] } @@ -116,6 +121,7 @@ tree-sitter-rust.workspace = true tree-sitter-typescript.workspace = true tree-sitter-yaml.workspace = true tree-sitter-bash.workspace = true +tree-sitter-md.workspace = true unindent.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/editor/benches/display_map.rs b/crates/editor/benches/display_map.rs index 919249ad01b87fe5fbabe1b5fe6e563179b41d10..c443bdba1c87bf9f8eac7588e2189c4fc98fb1f3 100644 --- a/crates/editor/benches/display_map.rs +++ b/crates/editor/benches/display_map.rs @@ -2,6 +2,7 @@ use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main}; use editor::MultiBuffer; use gpui::TestDispatcher; use itertools::Itertools; +use multi_buffer::MultiBufferOffset; use rand::{Rng, SeedableRng, rngs::StdRng}; use std::num::NonZeroU32; use text::Bias; @@ -24,7 +25,9 @@ fn to_tab_point_benchmark(c: 
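
The modal's list labels rely on `format_time_ago`, defined earlier in this hunk, which buckets elapsed time coarsely. A standalone copy with a few assertions makes the boundaries explicit: anything under two minutes (including zero seconds) reads as "1 minute", and exactly one hour or one day rolls over into the next unit's wording.

```rust
use std::time::Duration;

// Standalone copy of `format_time_ago` from the hunk above, for illustration only.
fn format_time_ago(elapsed: Duration) -> String {
    let seconds = elapsed.as_secs();
    if seconds < 120 {
        "1 minute".to_string()
    } else if seconds < 3600 {
        format!("{} minutes", seconds / 60)
    } else if seconds < 7200 {
        "1 hour".to_string()
    } else if seconds < 86400 {
        format!("{} hours", seconds / 3600)
    } else if seconds < 172800 {
        "1 day".to_string()
    } else {
        format!("{} days", seconds / 86400)
    }
}

fn main() {
    // Everything under two minutes (including zero) is reported as "1 minute".
    assert_eq!(format_time_ago(Duration::from_secs(0)), "1 minute");
    assert_eq!(format_time_ago(Duration::from_secs(119)), "1 minute");
    assert_eq!(format_time_ago(Duration::from_secs(300)), "5 minutes");
    assert_eq!(format_time_ago(Duration::from_secs(3600)), "1 hour");
    assert_eq!(format_time_ago(Duration::from_secs(86399)), "23 hours");
    assert_eq!(format_time_ago(Duration::from_secs(172_800)), "2 days");
    println!("format_time_ago buckets ok");
}
```
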
&mut Criterion) { let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot.clone()); let fold_point = fold_snapshot.to_fold_point( - inlay_snapshot.to_point(InlayOffset(rng.random_range(0..length))), + inlay_snapshot.to_point(InlayOffset( + rng.random_range(MultiBufferOffset(0)..MultiBufferOffset(length)), + )), Bias::Left, ); let (_, snapshot) = TabMap::new(fold_snapshot, NonZeroU32::new(4).unwrap()); @@ -42,7 +45,7 @@ fn to_tab_point_benchmark(c: &mut Criterion) { &snapshot, |bench, snapshot| { bench.iter(|| { - snapshot.to_tab_point(fold_point); + snapshot.fold_point_to_tab_point(fold_point); }); }, ); @@ -69,12 +72,14 @@ fn to_fold_point_benchmark(c: &mut Criterion) { let (_, fold_snapshot) = FoldMap::new(inlay_snapshot.clone()); let fold_point = fold_snapshot.to_fold_point( - inlay_snapshot.to_point(InlayOffset(rng.random_range(0..length))), + inlay_snapshot.to_point(InlayOffset( + rng.random_range(MultiBufferOffset(0)..MultiBufferOffset(length)), + )), Bias::Left, ); let (_, snapshot) = TabMap::new(fold_snapshot, NonZeroU32::new(4).unwrap()); - let tab_point = snapshot.to_tab_point(fold_point); + let tab_point = snapshot.fold_point_to_tab_point(fold_point); (length, snapshot, tab_point) }; @@ -89,7 +94,7 @@ fn to_fold_point_benchmark(c: &mut Criterion) { &snapshot, |bench, snapshot| { bench.iter(|| { - snapshot.to_fold_point(tab_point, Bias::Left); + snapshot.tab_point_to_fold_point(tab_point, Bias::Left); }); }, ); diff --git a/crates/editor/benches/editor_render.rs b/crates/editor/benches/editor_render.rs index cb7f37810aad04f8c6a73440c7da93658224ba26..daaeede790cbd75a7238a81559513c5d3165a054 100644 --- a/crates/editor/benches/editor_render.rs +++ b/crates/editor/benches/editor_render.rs @@ -29,7 +29,7 @@ fn editor_input_with_1000_cursors(bencher: &mut Bencher<'_>, cx: &TestAppContext ); editor }); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor }); @@ -72,7 +72,7 @@ fn open_editor_with_one_long_line(bencher: &mut Bencher<'_>, args: &(String, Tes editor.set_style(editor::EditorStyle::default(), window, cx); editor }); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor }); }); @@ -100,7 +100,7 @@ fn editor_render(bencher: &mut Bencher<'_>, cx: &TestAppContext) { editor.set_style(editor::EditorStyle::default(), window, cx); editor }); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor }); @@ -123,7 +123,7 @@ pub fn benches() { cx.set_global(store); assets::Assets.load_test_fonts(cx); theme::init(theme::LoadThemes::JustBase, cx); - // release_channel::init(SemanticVersion::default(), cx); + // release_channel::init(semver::Version::new(0,0,0), cx); editor::init(cx); }); diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index e823b06910fba67a38754ece6ad746f5f632e613..ba36f88f6380ade2a0d70f0f7ac3eb221446b781 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -327,6 +327,23 @@ pub struct AddSelectionBelow { pub skip_soft_wrap: bool, } +/// Inserts a snippet at the cursor. +#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = editor)] +#[serde(deny_unknown_fields)] +pub struct InsertSnippet { + /// Language name if using a named snippet, or `None` for a global snippet + /// + /// This is typically lowercase and matches the filename containing the snippet, without the `.json` extension. 
+ pub language: Option, + /// Name if using a named snippet + pub name: Option, + + /// Snippet body, if not using a named snippet + // todo(andrew): use `ListOrDirect` or similar for multiline snippet body + pub snippet: Option, +} + actions!( debugger, [ @@ -353,7 +370,8 @@ actions!( AcceptEditPrediction, /// Accepts a partial edit prediction. #[action(deprecated_aliases = ["editor::AcceptPartialCopilotSuggestion"])] - AcceptPartialEditPrediction, + AcceptNextWordEditPrediction, + AcceptNextLineEditPrediction, /// Applies all diff hunks in the editor. ApplyAllDiffHunks, /// Applies the diff hunk at the current position. @@ -453,8 +471,6 @@ actions!( CollapseAllDiffHunks, /// Expands macros recursively at cursor position. ExpandMacroRecursively, - /// Finds all references to the symbol at cursor. - FindAllReferences, /// Finds the next match in the search. FindNextMatch, /// Finds the previous match in the search. @@ -665,6 +681,10 @@ actions!( ReloadFile, /// Rewraps text to fit within the preferred line length. Rewrap, + /// Rotates selections or lines backward. + RotateSelectionsBackward, + /// Rotates selections or lines forward. + RotateSelectionsForward, /// Runs flycheck diagnostics. RunFlycheck, /// Scrolls the cursor to the bottom of the viewport. @@ -827,3 +847,20 @@ actions!( WrapSelectionsInTag ] ); + +/// Finds all references to the symbol at cursor. +#[derive(PartialEq, Clone, Deserialize, JsonSchema, Action)] +#[action(namespace = editor)] +#[serde(deny_unknown_fields)] +pub struct FindAllReferences { + #[serde(default = "default_true")] + pub always_open_multibuffer: bool, +} + +impl Default for FindAllReferences { + fn default() -> Self { + Self { + always_open_multibuffer: true, + } + } +} diff --git a/crates/editor/src/blink_manager.rs b/crates/editor/src/blink_manager.rs index 9c2b911f1b068d5d8cc14c3875af08033f34bc66..d99cf6a7d59d40383e572f4638b17edbf3d0da53 100644 --- a/crates/editor/src/blink_manager.rs +++ b/crates/editor/src/blink_manager.rs @@ -1,20 +1,28 @@ -use crate::EditorSettings; use gpui::Context; -use settings::Settings; use settings::SettingsStore; use smol::Timer; use std::time::Duration; +use ui::App; pub struct BlinkManager { blink_interval: Duration, blink_epoch: usize, + /// Whether the blinking is paused. blinking_paused: bool, + /// Whether the cursor should be visibly rendered or not. visible: bool, + /// Whether the blinking currently enabled. enabled: bool, + /// Whether the blinking is enabled in the settings. + blink_enabled_in_settings: fn(&App) -> bool, } impl BlinkManager { - pub fn new(blink_interval: Duration, cx: &mut Context) -> Self { + pub fn new( + blink_interval: Duration, + blink_enabled_in_settings: fn(&App) -> bool, + cx: &mut Context, + ) -> Self { // Make sure we blink the cursors if the setting is re-enabled cx.observe_global::(move |this, cx| { this.blink_cursors(this.blink_epoch, cx) @@ -27,6 +35,7 @@ impl BlinkManager { blinking_paused: false, visible: true, enabled: false, + blink_enabled_in_settings, } } @@ -55,7 +64,7 @@ impl BlinkManager { } fn blink_cursors(&mut self, epoch: usize, cx: &mut Context) { - if EditorSettings::get_global(cx).cursor_blink { + if (self.blink_enabled_in_settings)(cx) { if epoch == self.blink_epoch && self.enabled && !self.blinking_paused { self.visible = !self.visible; cx.notify(); @@ -83,6 +92,7 @@ impl BlinkManager { } } + /// Enable the blinking of the cursor. 
pub fn enable(&mut self, cx: &mut Context) { if self.enabled { return; @@ -95,6 +105,7 @@ impl BlinkManager { self.blink_cursors(self.blink_epoch, cx); } + /// Disable the blinking of the cursor. pub fn disable(&mut self, _cx: &mut Context) { self.visible = false; self.enabled = false; diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs new file mode 100644 index 0000000000000000000000000000000000000000..4879c5e9ce703227d3c03f4d3373512769b1515c --- /dev/null +++ b/crates/editor/src/bracket_colorization.rs @@ -0,0 +1,1374 @@ +//! Bracket highlights, also known as "rainbow brackets". +//! Uses tree-sitter queries from brackets.scm to capture bracket pairs, +//! and theme accents to colorize those. + +use std::ops::Range; + +use crate::Editor; +use collections::HashMap; +use gpui::{Context, HighlightStyle}; +use itertools::Itertools; +use language::language_settings; +use multi_buffer::{Anchor, ExcerptId}; +use ui::{ActiveTheme, utils::ensure_minimum_contrast}; + +struct ColorizedBracketsHighlight; + +impl Editor { + pub(crate) fn colorize_brackets(&mut self, invalidate: bool, cx: &mut Context) { + if !self.mode.is_full() { + return; + } + + if invalidate { + self.fetched_tree_sitter_chunks.clear(); + } + + let accents_count = cx.theme().accents().0.len(); + let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let all_excerpts = self.buffer().read(cx).excerpt_ids(); + let anchors_in_multi_buffer = |current_excerpt: ExcerptId, + text_anchors: [text::Anchor; 4]| + -> Option<[Option<_>; 4]> { + multi_buffer_snapshot + .anchors_in_excerpt(current_excerpt, text_anchors) + .or_else(|| { + all_excerpts + .iter() + .filter(|&&excerpt_id| excerpt_id != current_excerpt) + .find_map(|&excerpt_id| { + multi_buffer_snapshot.anchors_in_excerpt(excerpt_id, text_anchors) + }) + })? 
+ .collect_array() + }; + + let bracket_matches_by_accent = self.visible_excerpts(false, cx).into_iter().fold( + HashMap::default(), + |mut acc, (excerpt_id, (buffer, _, buffer_range))| { + let buffer_snapshot = buffer.read(cx).snapshot(); + if language_settings::language_settings( + buffer_snapshot.language().map(|language| language.name()), + buffer_snapshot.file(), + cx, + ) + .colorize_brackets + { + let fetched_chunks = self + .fetched_tree_sitter_chunks + .entry(excerpt_id) + .or_default(); + + let brackets_by_accent = buffer_snapshot + .fetch_bracket_ranges( + buffer_range.start..buffer_range.end, + Some(fetched_chunks), + ) + .into_iter() + .flat_map(|(chunk_range, pairs)| { + if fetched_chunks.insert(chunk_range) { + pairs + } else { + Vec::new() + } + }) + .filter_map(|pair| { + let color_index = pair.color_index?; + + let buffer_open_range = buffer_snapshot + .anchor_before(pair.open_range.start) + ..buffer_snapshot.anchor_after(pair.open_range.end); + let buffer_close_range = buffer_snapshot + .anchor_before(pair.close_range.start) + ..buffer_snapshot.anchor_after(pair.close_range.end); + let [ + buffer_open_range_start, + buffer_open_range_end, + buffer_close_range_start, + buffer_close_range_end, + ] = anchors_in_multi_buffer( + excerpt_id, + [ + buffer_open_range.start, + buffer_open_range.end, + buffer_close_range.start, + buffer_close_range.end, + ], + )?; + let multi_buffer_open_range = + buffer_open_range_start.zip(buffer_open_range_end); + let multi_buffer_close_range = + buffer_close_range_start.zip(buffer_close_range_end); + + let mut ranges = Vec::with_capacity(2); + if let Some((open_start, open_end)) = multi_buffer_open_range { + ranges.push(open_start..open_end); + } + if let Some((close_start, close_end)) = multi_buffer_close_range { + ranges.push(close_start..close_end); + } + if ranges.is_empty() { + None + } else { + Some((color_index % accents_count, ranges)) + } + }); + + for (accent_number, new_ranges) in brackets_by_accent { + let ranges = acc + .entry(accent_number) + .or_insert_with(Vec::>::new); + + for new_range in new_ranges { + let i = ranges + .binary_search_by(|probe| { + probe.start.cmp(&new_range.start, &multi_buffer_snapshot) + }) + .unwrap_or_else(|i| i); + ranges.insert(i, new_range); + } + } + } + + acc + }, + ); + + if invalidate { + self.clear_highlights::(cx); + } + + let editor_background = cx.theme().colors().editor_background; + for (accent_number, bracket_highlights) in bracket_matches_by_accent { + let bracket_color = cx.theme().accents().color_for_index(accent_number as u32); + let adjusted_color = ensure_minimum_contrast(bracket_color, editor_background, 55.0); + let style = HighlightStyle { + color: Some(adjusted_color), + ..HighlightStyle::default() + }; + + self.highlight_text_key::( + accent_number, + bracket_highlights, + style, + true, + cx, + ); + } + } +} + +#[cfg(test)] +mod tests { + use std::{cmp, sync::Arc, time::Duration}; + + use super::*; + use crate::{ + DisplayPoint, EditorMode, EditorSnapshot, MoveToBeginning, MoveToEnd, MoveUp, + display_map::{DisplayRow, ToDisplayPoint}, + editor_tests::init_test, + test::{ + editor_lsp_test_context::EditorLspTestContext, editor_test_context::EditorTestContext, + }, + }; + use collections::HashSet; + use fs::FakeFs; + use gpui::{AppContext as _, UpdateGlobal as _}; + use indoc::indoc; + use itertools::Itertools; + use language::{Capability, markdown_lang}; + use languages::rust_lang; + use multi_buffer::{ExcerptRange, MultiBuffer}; + use pretty_assertions::assert_eq; + use 
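
The colorization logic in this hunk maps each bracket pair's `color_index` to a theme accent via `color_index % accents_count`, then adjusts the result toward readable contrast with `ensure_minimum_contrast`. The modulo cycling on its own looks like the sketch below, with a stand-in palette in place of `cx.theme().accents()`; the names here are illustrative, not from this change.

```rust
// Illustrative stand-in palette; the real colors come from the theme's accents.
const ACCENTS: [&str; 7] = [
    "accent-1", "accent-2", "accent-3", "accent-4", "accent-5", "accent-6", "accent-7",
];

/// A bracket pair with color index `depth` takes accent `depth % ACCENTS.len()`,
/// so colors repeat once nesting goes deeper than the palette.
fn accent_for_depth(depth: usize) -> &'static str {
    ACCENTS[depth % ACCENTS.len()]
}

fn main() {
    assert_eq!(accent_for_depth(0), "accent-1");
    assert_eq!(accent_for_depth(7), "accent-1"); // wraps around the palette
    assert_eq!(accent_for_depth(9), "accent-3");
    println!("accent cycling ok");
}
```
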
project::Project; + use rope::Point; + use serde_json::json; + use settings::{AccentContent, SettingsStore}; + use text::{Bias, OffsetRangeExt, ToOffset}; + use theme::ThemeStyleContent; + use ui::SharedString; + use util::{path, post_inc}; + + #[gpui::test] + async fn test_basic_bracket_colorization(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(rust_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + cx.set_state(indoc! {r#"ˇuse std::{collections::HashMap, future::Future}; + +fn main() { + let a = one((), { () }, ()); + println!("{a}"); + println!("{a}"); + for i in 0..a { + println!("{i}"); + } + + let b = { + { + { + [([([([([([([([([([((), ())])])])])])])])])])] + } + } + }; +} + +#[rustfmt::skip] +fn one(a: (), (): (), c: ()) -> usize { 1 } + +fn two(a: HashMap>>) -> usize +where + T: Future>>>>, +{ + 2 +} +"#}); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + r#"use std::«1{collections::HashMap, future::Future}1»; + +fn main«1()1» «1{ + let a = one«2(«3()3», «3{ «4()4» }3», «3()3»)2»; + println!«2("{a}")2»; + println!«2("{a}")2»; + for i in 0..a «2{ + println!«3("{i}")3»; + }2» + + let b = «2{ + «3{ + «4{ + «5[«6(«7[«1(«2[«3(«4[«5(«6[«7(«1[«2(«3[«4(«5[«6(«7[«1(«2[«3(«4()4», «4()4»)3»]2»)1»]7»)6»]5»)4»]3»)2»]1»)7»]6»)5»]4»)3»]2»)1»]7»)6»]5» + }4» + }3» + }2»; +}1» + +#«1[rustfmt::skip]1» +fn one«1(a: «2()2», «2()2»: «2()2», c: «2()2»)1» -> usize «1{ 1 }1» + +fn two«11»«1(a: HashMap«24»>3»>2»)1» -> usize +where + T: Future«15»>4»>3»>2»>1», +«1{ + 2 +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +6 hsla(95.00, 38.00%, 62.00%, 1.00) +7 hsla(39.00, 67.00%, 69.00%, 1.00) +"#, + &bracket_colors_markup(&mut cx), + "All brackets should be colored based on their depth" + ); + } + + #[gpui::test] + async fn test_file_less_file_colorization(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let editor = cx.add_window(|window, cx| { + let multi_buffer = MultiBuffer::build_simple("fn main() {}", cx); + multi_buffer.update(cx, |multi_buffer, cx| { + multi_buffer + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_language(Some(rust_lang()), cx); + }); + }); + Editor::new(EditorMode::full(), multi_buffer, None, window, cx) + }); + + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + "fn main«1()1» «1{}1» +1 hsla(207.80, 16.20%, 69.19%, 1.00) +", + editor + .update(cx, |editor, window, cx| { + editor_bracket_colors_markup(&editor.snapshot(window, cx)) + }) + .unwrap(), + "File-less buffer should still have its brackets colorized" + ); + } + + #[gpui::test] + async fn test_markdown_bracket_colorization(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(markdown_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + cx.set_state(indoc! 
{r#"ˇ[LLM-powered features](./ai/overview.md), [bring and configure your own API keys](./ai/llm-providers.md#use-your-own-keys)"#}); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + r#"«1[LLM-powered features]1»«1(./ai/overview.md)1», «1[bring and configure your own API keys]1»«1(./ai/llm-providers.md#use-your-own-keys)1» +1 hsla(207.80, 16.20%, 69.19%, 1.00) +"#, + &bracket_colors_markup(&mut cx), + "All markdown brackets should be colored based on their depth" + ); + + cx.set_state(indoc! {r#"ˇ{{}}"#}); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + r#"«1{«2{}2»}1» +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +"#, + &bracket_colors_markup(&mut cx), + "All markdown brackets should be colored based on their depth, again" + ); + } + + #[gpui::test] + async fn test_bracket_colorization_when_editing(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(rust_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + cx.set_state(indoc! {r#" +struct Foo<'a, T> { + data: Vec>, +} + +fn process_data() { + let map:ˇ +} +"#}); + + cx.update_editor(|editor, window, cx| { + editor.handle_input(" Result<", window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + indoc! {r#" +struct Foo«1<'a, T>1» «1{ + data: Vec«23»>2», +}1» + +fn process_data«1()1» «1{ + let map: Result< +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +"#}, + &bracket_colors_markup(&mut cx), + "Brackets without pairs should be ignored and not colored" + ); + + cx.update_editor(|editor, window, cx| { + editor.handle_input("Option1» «1{ + data: Vec«23»>2», +}1» + +fn process_data«1()1» «1{ + let map: Result", window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + indoc! {r#" +struct Foo«1<'a, T>1» «1{ + data: Vec«23»>2», +}1» + +fn process_data«1()1» «1{ + let map: Result2» +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +"#}, + &bracket_colors_markup(&mut cx), + "When brackets start to get closed, inner brackets are re-colored based on their depth" + ); + + cx.update_editor(|editor, window, cx| { + editor.handle_input(">", window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + indoc! {r#" +struct Foo«1<'a, T>1» «1{ + data: Vec«23»>2», +}1» + +fn process_data«1()1» «1{ + let map: Result3»>2» +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +"#}, + &bracket_colors_markup(&mut cx), + ); + + cx.update_editor(|editor, window, cx| { + editor.handle_input(", ()> = unimplemented!();", window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + indoc! 
{r#" +struct Foo«1<'a, T>1» «1{ + data: Vec«23»>2», +}1» + +fn process_data«1()1» «1{ + let map: Result«24»>3», «3()3»>2» = unimplemented!«2()2»; +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#}, + &bracket_colors_markup(&mut cx), + ); + } + + #[gpui::test] + async fn test_bracket_colorization_chunks(cx: &mut gpui::TestAppContext) { + let comment_lines = 100; + + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(rust_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + cx.set_state(&separate_with_comment_lines( + indoc! {r#" +mod foo { + ˇfn process_data_1() { + let map: Option> = None; + } +"#}, + indoc! {r#" + fn process_data_2() { + let map: Option> = None; + } +} +"#}, + comment_lines, + )); + + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + &separate_with_comment_lines( + indoc! {r#" +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + }2» +"#}, + indoc! {r#" + fn process_data_2() { + let map: Option> = None; + } +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#}, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "First, the only visible chunk is getting the bracket highlights" + ); + + cx.update_editor(|editor, window, cx| { + editor.move_to_end(&MoveToEnd, window, cx); + editor.move_up(&MoveUp, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + &separate_with_comment_lines( + indoc! {r#" +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + }2» +"#}, + indoc! {r#" + fn process_data_2«2()2» «2{ + let map: Option«34»>3» = None; + }2» +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#}, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "After scrolling to the bottom, both chunks should have the highlights" + ); + + cx.update_editor(|editor, window, cx| { + editor.handle_input("{{}}}", window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + &separate_with_comment_lines( + indoc! {r#" +mod foo «1{ + fn process_data_1() { + let map: Option> = None; + } +"#}, + indoc! {r#" + fn process_data_2«2()2» «2{ + let map: Option«34»>3» = None; + } + «3{«4{}4»}3»}2»}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#}, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "First chunk's brackets are invalidated after an edit, and only 2nd (visible) chunk is re-colorized" + ); + + cx.update_editor(|editor, window, cx| { + editor.move_to_beginning(&MoveToBeginning, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + assert_eq!( + &separate_with_comment_lines( + indoc! 
{r#" +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + }2» +"#}, + indoc! {r#" + fn process_data_2«2()2» «2{ + let map: Option«34»>3» = None; + } + «3{«4{}4»}3»}2»}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#}, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "Scrolling back to top should re-colorize all chunks' brackets" + ); + + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.colorize_brackets = Some(false); + }); + }); + }); + assert_eq!( + &separate_with_comment_lines( + indoc! {r#" +mod foo { + fn process_data_1() { + let map: Option> = None; + } +"#}, + r#" fn process_data_2() { + let map: Option> = None; + } + {{}}}} + +"#, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "Turning bracket colorization off should remove all bracket colors" + ); + + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.colorize_brackets = Some(true); + }); + }); + }); + assert_eq!( + &separate_with_comment_lines( + indoc! {r#" +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + }2» +"#}, + r#" fn process_data_2() { + let map: Option> = None; + } + {{}}}}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#, + comment_lines, + ), + &bracket_colors_markup(&mut cx), + "Turning bracket colorization back on refreshes the visible excerpts' bracket colors" + ); + } + + #[gpui::test] + async fn test_rainbow_bracket_highlights(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(rust_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + // taken from r-a https://github.com/rust-lang/rust-analyzer/blob/d733c07552a2dc0ec0cc8f4df3f0ca969a93fd90/crates/ide/src/inlay_hints.rs#L81-L297 + cx.set_state(indoc! 
{r#"ˇ + pub(crate) fn inlay_hints( + db: &RootDatabase, + file_id: FileId, + range_limit: Option, + config: &InlayHintsConfig, + ) -> Vec { + let _p = tracing::info_span!("inlay_hints").entered(); + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + let file = sema.parse(file_id); + let file = file.syntax(); + + let mut acc = Vec::new(); + + let Some(scope) = sema.scope(file) else { + return acc; + }; + let famous_defs = FamousDefs(&sema, scope.krate()); + let display_target = famous_defs.1.to_display_target(sema.db); + + let ctx = &mut InlayHintCtx::default(); + let mut hints = |event| { + if let Some(node) = handle_event(ctx, event) { + hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node); + } + }; + let mut preorder = file.preorder(); + salsa::attach(sema.db, || { + while let Some(event) = preorder.next() { + if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none()) + { + preorder.skip_subtree(); + continue; + } + hints(event); + } + }); + if let Some(range_limit) = range_limit { + acc.retain(|hint| range_limit.contains_range(hint.range)); + } + acc + } + + #[derive(Default)] + struct InlayHintCtx { + lifetime_stacks: Vec>, + extern_block_parent: Option, + } + + pub(crate) fn inlay_hints_resolve( + db: &RootDatabase, + file_id: FileId, + resolve_range: TextRange, + hash: u64, + config: &InlayHintsConfig, + hasher: impl Fn(&InlayHint) -> u64, + ) -> Option { + let _p = tracing::info_span!("inlay_hints_resolve").entered(); + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + let file = sema.parse(file_id); + let file = file.syntax(); + + let scope = sema.scope(file)?; + let famous_defs = FamousDefs(&sema, scope.krate()); + let mut acc = Vec::new(); + + let display_target = famous_defs.1.to_display_target(sema.db); + + let ctx = &mut InlayHintCtx::default(); + let mut hints = |event| { + if let Some(node) = handle_event(ctx, event) { + hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node); + } + }; + + let mut preorder = file.preorder(); + while let Some(event) = preorder.next() { + // This can miss some hints that require the parent of the range to calculate + if matches!(&event, WalkEvent::Enter(node) if resolve_range.intersect(node.text_range()).is_none()) + { + preorder.skip_subtree(); + continue; + } + hints(event); + } + acc.into_iter().find(|hint| hasher(hint) == hash) + } + + fn handle_event(ctx: &mut InlayHintCtx, node: WalkEvent) -> Option { + match node { + WalkEvent::Enter(node) => { + if let Some(node) = ast::AnyHasGenericParams::cast(node.clone()) { + let params = node + .generic_param_list() + .map(|it| { + it.lifetime_params() + .filter_map(|it| { + it.lifetime().map(|it| format_smolstr!("{}", &it.text()[1..])) + }) + .collect() + }) + .unwrap_or_default(); + ctx.lifetime_stacks.push(params); + } + if let Some(node) = ast::ExternBlock::cast(node.clone()) { + ctx.extern_block_parent = Some(node); + } + Some(node) + } + WalkEvent::Leave(n) => { + if ast::AnyHasGenericParams::can_cast(n.kind()) { + ctx.lifetime_stacks.pop(); + } + if ast::ExternBlock::can_cast(n.kind()) { + ctx.extern_block_parent = None; + } + None + } + } + } + + // At some point when our hir infra is fleshed out enough we should flip this and traverse the + // HIR instead of the syntax tree. 
+ fn hints( + hints: &mut Vec, + ctx: &mut InlayHintCtx, + famous_defs @ FamousDefs(sema, _krate): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: EditionedFileId, + display_target: DisplayTarget, + node: SyntaxNode, + ) { + closing_brace::hints( + hints, + sema, + config, + display_target, + InRealFile { file_id, value: node.clone() }, + ); + if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) { + generic_param::hints(hints, famous_defs, config, any_has_generic_args); + } + + match_ast! { + match node { + ast::Expr(expr) => { + chaining::hints(hints, famous_defs, config, display_target, &expr); + adjustment::hints(hints, famous_defs, config, display_target, &expr); + match expr { + ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)), + ast::Expr::MethodCallExpr(it) => { + param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)) + } + ast::Expr::ClosureExpr(it) => { + closure_captures::hints(hints, famous_defs, config, it.clone()); + closure_ret::hints(hints, famous_defs, config, display_target, it) + }, + ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it), + _ => Some(()), + } + }, + ast::Pat(it) => { + binding_mode::hints(hints, famous_defs, config, &it); + match it { + ast::Pat::IdentPat(it) => { + bind_pat::hints(hints, famous_defs, config, display_target, &it); + } + ast::Pat::RangePat(it) => { + range_exclusive::hints(hints, famous_defs, config, it); + } + _ => {} + } + Some(()) + }, + ast::Item(it) => match it { + ast::Item::Fn(it) => { + implicit_drop::hints(hints, famous_defs, config, display_target, &it); + if let Some(extern_block) = &ctx.extern_block_parent { + extern_block::fn_hints(hints, famous_defs, config, &it, extern_block); + } + lifetime::fn_hints(hints, ctx, famous_defs, config, it) + }, + ast::Item::Static(it) => { + if let Some(extern_block) = &ctx.extern_block_parent { + extern_block::static_hints(hints, famous_defs, config, &it, extern_block); + } + implicit_static::hints(hints, famous_defs, config, Either::Left(it)) + }, + ast::Item::Const(it) => implicit_static::hints(hints, famous_defs, config, Either::Right(it)), + ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, it), + ast::Item::ExternBlock(it) => extern_block::extern_block_hints(hints, famous_defs, config, it), + _ => None, + }, + // trait object type elisions + ast::Type(ty) => match ty { + ast::Type::FnPtrType(ptr) => lifetime::fn_ptr_hints(hints, ctx, famous_defs, config, ptr), + ast::Type::PathType(path) => { + lifetime::fn_path_hints(hints, ctx, famous_defs, config, &path); + implied_dyn_trait::hints(hints, famous_defs, config, Either::Left(path)); + Some(()) + }, + ast::Type::DynTraitType(dyn_) => { + implied_dyn_trait::hints(hints, famous_defs, config, Either::Right(dyn_)); + Some(()) + }, + _ => Some(()), + }, + ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, it), + _ => Some(()), + } + }; + } + "#}); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let actual_ranges = cx.update_editor(|editor, window, cx| { + editor + .snapshot(window, cx) + .all_text_highlight_ranges::() + }); + + let mut highlighted_brackets = HashMap::default(); + for (color, range) in actual_ranges.iter().cloned() { + highlighted_brackets.insert(range, color); + } + + let last_bracket = actual_ranges + .iter() + .max_by_key(|(_, p)| p.end.row) + .unwrap() + .clone(); + + 
cx.update_editor(|editor, window, cx| { + let was_scrolled = editor.set_scroll_position( + gpui::Point::new(0.0, last_bracket.1.end.row as f64 * 2.0), + window, + cx, + ); + assert!(was_scrolled.0); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let ranges_after_scrolling = cx.update_editor(|editor, window, cx| { + editor + .snapshot(window, cx) + .all_text_highlight_ranges::() + }); + let new_last_bracket = ranges_after_scrolling + .iter() + .max_by_key(|(_, p)| p.end.row) + .unwrap() + .clone(); + + assert_ne!( + last_bracket, new_last_bracket, + "After scrolling down, we should have highlighted more brackets" + ); + + cx.update_editor(|editor, window, cx| { + let was_scrolled = editor.set_scroll_position(gpui::Point::default(), window, cx); + assert!(was_scrolled.0); + }); + + for _ in 0..200 { + cx.update_editor(|editor, window, cx| { + editor.apply_scroll_delta(gpui::Point::new(0.0, 0.25), window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let colored_brackets = cx.update_editor(|editor, window, cx| { + editor + .snapshot(window, cx) + .all_text_highlight_ranges::() + }); + for (color, range) in colored_brackets.clone() { + assert!( + highlighted_brackets.entry(range).or_insert(color) == &color, + "Colors should stay consistent while scrolling!" + ); + } + + let snapshot = cx.update_editor(|editor, window, cx| editor.snapshot(window, cx)); + let scroll_position = snapshot.scroll_position(); + let visible_lines = + cx.update_editor(|editor, _, _| editor.visible_line_count().unwrap()); + let visible_range = DisplayRow(scroll_position.y as u32) + ..DisplayRow((scroll_position.y + visible_lines) as u32); + + let current_highlighted_bracket_set: HashSet = HashSet::from_iter( + colored_brackets + .iter() + .flat_map(|(_, range)| [range.start, range.end]), + ); + + for highlight_range in highlighted_brackets.keys().filter(|bracket_range| { + visible_range.contains(&bracket_range.start.to_display_point(&snapshot).row()) + || visible_range.contains(&bracket_range.end.to_display_point(&snapshot).row()) + }) { + assert!( + current_highlighted_bracket_set.contains(&highlight_range.start) + || current_highlighted_bracket_set.contains(&highlight_range.end), + "Should not lose highlights while scrolling in the visible range!" + ); + } + + let buffer_snapshot = snapshot.buffer().as_singleton().unwrap().2; + for bracket_match in buffer_snapshot + .fetch_bracket_ranges( + snapshot + .display_point_to_point( + DisplayPoint::new(visible_range.start, 0), + Bias::Left, + ) + .to_offset(&buffer_snapshot) + ..snapshot + .display_point_to_point( + DisplayPoint::new( + visible_range.end, + snapshot.line_len(visible_range.end), + ), + Bias::Right, + ) + .to_offset(&buffer_snapshot), + None, + ) + .iter() + .flat_map(|entry| entry.1) + .filter(|bracket_match| bracket_match.color_index.is_some()) + { + let start = bracket_match.open_range.to_point(buffer_snapshot); + let end = bracket_match.close_range.to_point(buffer_snapshot); + let start_bracket = colored_brackets.iter().find(|(_, range)| *range == start); + assert!( + start_bracket.is_some(), + "Existing bracket start in the visible range should be highlighted. 
Missing color for match: \"{}\" at position {:?}", + buffer_snapshot + .text_for_range(start.start..end.end) + .collect::(), + start + ); + + let end_bracket = colored_brackets.iter().find(|(_, range)| *range == end); + assert!( + end_bracket.is_some(), + "Existing bracket end in the visible range should be highlighted. Missing color for match: \"{}\" at position {:?}", + buffer_snapshot + .text_for_range(start.start..end.end) + .collect::(), + start + ); + + assert_eq!( + start_bracket.unwrap().0, + end_bracket.unwrap().0, + "Bracket pair should be highlighted the same color!" + ) + } + } + } + + #[gpui::test] + async fn test_multi_buffer(cx: &mut gpui::TestAppContext) { + let comment_lines = 100; + + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "fn main() {{()}}", + "lib.rs": separate_with_comment_lines( + indoc! {r#" + mod foo { + fn process_data_1() { + let map: Option> = None; + // a + // b + // c + } + "#}, + indoc! {r#" + fn process_data_2() { + let other_map: Option> = None; + } + } + "#}, + comment_lines, + ) + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/lib.rs"), cx) + }) + .await + .unwrap(); + let buffer_2 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); + multi_buffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + cx, + ); + + let excerpt_rows = 5; + let rest_of_first_except_rows = 3; + multi_buffer.push_excerpts( + buffer_1.clone(), + [ + ExcerptRange::new(Point::new(0, 0)..Point::new(excerpt_rows, 0)), + ExcerptRange::new( + Point::new( + comment_lines as u32 + excerpt_rows + rest_of_first_except_rows, + 0, + ) + ..Point::new( + comment_lines as u32 + + excerpt_rows + + rest_of_first_except_rows + + excerpt_rows, + 0, + ), + ), + ], + cx, + ); + multi_buffer + }); + + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let editor_snapshot = editor + .update(cx, |editor, window, cx| editor.snapshot(window, cx)) + .unwrap(); + assert_eq!( + indoc! 
{r#" + + +fn main«1()1» «1{«2{«3()3»}2»}1» + + +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + // a + // b + + + fn process_data_2«2()2» «2{ + let other_map: Option«34»>3» = None; + }2» +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#,}, + &editor_bracket_colors_markup(&editor_snapshot), + "Multi buffers should have their brackets colored even if no excerpts contain the bracket counterpart (after fn `process_data_2()`) \ +or if the buffer pair spans across multiple excerpts (the one after `mod foo`)" + ); + + editor + .update(cx, |editor, window, cx| { + editor.handle_input("{[]", window, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + let editor_snapshot = editor + .update(cx, |editor, window, cx| editor.snapshot(window, cx)) + .unwrap(); + assert_eq!( + indoc! {r#" + + +{«1[]1»fn main«1()1» «1{«2{«3()3»}2»}1» + + +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«34»>3» = None; + // a + // b + + + fn process_data_2«2()2» «2{ + let other_map: Option«34»>3» = None; + }2» +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +4 hsla(187.00, 47.00%, 59.22%, 1.00) +5 hsla(355.00, 65.00%, 75.94%, 1.00) +"#,}, + &editor_bracket_colors_markup(&editor_snapshot), + ); + + cx.update(|cx| { + let theme = cx.theme().name.clone(); + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.theme.theme_overrides = HashMap::from_iter([( + theme.to_string(), + ThemeStyleContent { + accents: vec![ + AccentContent(Some(SharedString::new("#ff0000"))), + AccentContent(Some(SharedString::new("#0000ff"))), + ], + ..ThemeStyleContent::default() + }, + )]); + }); + }); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + let editor_snapshot = editor + .update(cx, |editor, window, cx| editor.snapshot(window, cx)) + .unwrap(); + assert_eq!( + indoc! 
{r#" + + +{«1[]1»fn main«1()1» «1{«2{«1()1»}2»}1» + + +mod foo «1{ + fn process_data_1«2()2» «2{ + let map: Option«12»>1» = None; + // a + // b + + + fn process_data_2«2()2» «2{ + let other_map: Option«12»>1» = None; + }2» +}1» + +1 hsla(0.00, 100.00%, 78.12%, 1.00) +2 hsla(240.00, 100.00%, 82.81%, 1.00) +"#,}, + &editor_bracket_colors_markup(&editor_snapshot), + "After updating theme accents, the editor should update the bracket coloring" + ); + } + + fn separate_with_comment_lines(head: &str, tail: &str, comment_lines: usize) -> String { + let mut result = head.to_string(); + result.push_str("\n"); + result.push_str(&"//\n".repeat(comment_lines)); + result.push_str(tail); + result + } + + fn bracket_colors_markup(cx: &mut EditorTestContext) -> String { + cx.update_editor(|editor, window, cx| { + editor_bracket_colors_markup(&editor.snapshot(window, cx)) + }) + } + + fn editor_bracket_colors_markup(snapshot: &EditorSnapshot) -> String { + fn display_point_to_offset(text: &str, point: DisplayPoint) -> usize { + let mut offset = 0; + for (row_idx, line) in text.lines().enumerate() { + if row_idx < point.row().0 as usize { + offset += line.len() + 1; // +1 for newline + } else { + offset += point.column() as usize; + break; + } + } + offset + } + + let actual_ranges = snapshot.all_text_highlight_ranges::(); + let editor_text = snapshot.text(); + + let mut next_index = 1; + let mut color_to_index = HashMap::default(); + let mut annotations = Vec::new(); + for (color, range) in &actual_ranges { + let color_index = *color_to_index + .entry(*color) + .or_insert_with(|| post_inc(&mut next_index)); + let start = snapshot.point_to_display_point(range.start, Bias::Left); + let end = snapshot.point_to_display_point(range.end, Bias::Right); + let start_offset = display_point_to_offset(&editor_text, start); + let end_offset = display_point_to_offset(&editor_text, end); + let bracket_text = &editor_text[start_offset..end_offset]; + let bracket_char = bracket_text.chars().next().unwrap(); + + if matches!(bracket_char, '{' | '[' | '(' | '<') { + annotations.push((start_offset, format!("«{color_index}"))); + } else { + annotations.push((end_offset, format!("{color_index}»"))); + } + } + + annotations.sort_by(|(pos_a, text_a), (pos_b, text_b)| { + pos_a.cmp(pos_b).reverse().then_with(|| { + let a_is_opening = text_a.starts_with('«'); + let b_is_opening = text_b.starts_with('«'); + match (a_is_opening, b_is_opening) { + (true, false) => cmp::Ordering::Less, + (false, true) => cmp::Ordering::Greater, + _ => cmp::Ordering::Equal, + } + }) + }); + annotations.dedup(); + + let mut markup = editor_text; + for (offset, text) in annotations { + markup.insert_str(offset, &text); + } + + markup.push_str("\n"); + for (index, color) in color_to_index + .iter() + .map(|(color, index)| (*index, *color)) + .sorted_by_key(|(index, _)| *index) + { + markup.push_str(&format!("{index} {color}\n")); + } + + markup + } +} diff --git a/crates/editor/src/code_completion_tests.rs b/crates/editor/src/code_completion_tests.rs index ec97c0ebb31952da9ad8e9e6f4f75b4b0078c4a3..4602824486ebb88f78ed529abb91ddcc1c34646f 100644 --- a/crates/editor/src/code_completion_tests.rs +++ b/crates/editor/src/code_completion_tests.rs @@ -239,6 +239,89 @@ async fn test_fuzzy_over_sort_positions(cx: &mut TestAppContext) { assert_eq!(matches[2].string, "fetch_code_lens"); } +#[gpui::test] +async fn test_semver_label_sort_by_latest_version(cx: &mut TestAppContext) { + let mut versions = [ + "10.4.112", + "10.4.22", + "10.4.2", + "10.4.20", + "10.4.21", + 
"10.4.12", + // Pre-release versions + "10.4.22-alpha", + "10.4.22-beta.1", + "10.4.22-rc.1", + // Build metadata versions + "10.4.21+build.123", + "10.4.20+20210327", + ]; + versions.sort_by(|a, b| { + match ( + semver::Version::parse(a).ok(), + semver::Version::parse(b).ok(), + ) { + (Some(a_ver), Some(b_ver)) => b_ver.cmp(&a_ver), + _ => std::cmp::Ordering::Equal, + } + }); + let completions: Vec<_> = versions + .iter() + .enumerate() + .map(|(i, version)| { + // This sort text would come from the LSP + let sort_text = format!("{:08}", i); + CompletionBuilder::new(version, None, &sort_text, None) + }) + .collect(); + + // Case 1: User types just the major and minor version + let matches = + filter_and_sort_matches("10.4.", &completions, SnippetSortOrder::default(), cx).await; + // Versions are ordered by recency (latest first) + let expected_versions = [ + "10.4.112", + "10.4.22", + "10.4.22-rc.1", + "10.4.22-beta.1", + "10.4.22-alpha", + "10.4.21+build.123", + "10.4.21", + "10.4.20+20210327", + "10.4.20", + "10.4.12", + "10.4.2", + ]; + for (match_item, expected) in matches.iter().zip(expected_versions.iter()) { + assert_eq!(match_item.string.as_ref() as &str, *expected); + } + + // Case 2: User types the major, minor, and patch version + let matches = + filter_and_sort_matches("10.4.2", &completions, SnippetSortOrder::default(), cx).await; + let expected_versions = [ + // Exact match comes first + "10.4.2", + // Ordered by recency with exact major, minor, and patch versions + "10.4.22", + "10.4.22-rc.1", + "10.4.22-beta.1", + "10.4.22-alpha", + "10.4.21+build.123", + "10.4.21", + "10.4.20+20210327", + "10.4.20", + // Versions with non-exact patch versions are ordered by fuzzy score + // Higher fuzzy score than 112 patch version since "2" appears before "1" + // in "12", making it rank higher than "112" + "10.4.12", + "10.4.112", + ]; + for (match_item, expected) in matches.iter().zip(expected_versions.iter()) { + assert_eq!(match_item.string.as_ref() as &str, *expected); + } +} + async fn test_for_each_prefix( target: &str, completions: &Vec, @@ -259,30 +342,55 @@ struct CompletionBuilder; impl CompletionBuilder { fn constant(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion { - Self::new(label, filter_text, sort_text, CompletionItemKind::CONSTANT) + Self::new( + label, + filter_text, + sort_text, + Some(CompletionItemKind::CONSTANT), + ) } fn function(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion { - Self::new(label, filter_text, sort_text, CompletionItemKind::FUNCTION) + Self::new( + label, + filter_text, + sort_text, + Some(CompletionItemKind::FUNCTION), + ) } fn method(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion { - Self::new(label, filter_text, sort_text, CompletionItemKind::METHOD) + Self::new( + label, + filter_text, + sort_text, + Some(CompletionItemKind::METHOD), + ) } fn variable(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion { - Self::new(label, filter_text, sort_text, CompletionItemKind::VARIABLE) + Self::new( + label, + filter_text, + sort_text, + Some(CompletionItemKind::VARIABLE), + ) } fn snippet(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion { - Self::new(label, filter_text, sort_text, CompletionItemKind::SNIPPET) + Self::new( + label, + filter_text, + sort_text, + Some(CompletionItemKind::SNIPPET), + ) } fn new( label: &str, filter_text: Option<&str>, sort_text: &str, - kind: CompletionItemKind, + kind: Option, ) -> Completion { Completion { 
replace_range: Anchor::MIN..Anchor::MAX, @@ -294,7 +402,7 @@ impl CompletionBuilder { server_id: LanguageServerId(0), lsp_completion: Box::new(CompletionItem { label: label.to_string(), - kind: Some(kind), + kind: kind, sort_text: Some(sort_text.to_string()), filter_text: filter_text.map(|text| text.to_string()), ..Default::default() @@ -305,6 +413,8 @@ impl CompletionBuilder { icon_path: None, insert_text_mode: None, confirm: None, + match_start: None, + snippet_deduplication_key: None, } } } diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 9e29cd955a80c7025ef2ff1ee5aaf38c665bed1a..d255effdb72a003014dff0805fa34a23d11c8c81 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -8,6 +8,7 @@ use gpui::{ use itertools::Itertools; use language::CodeLabel; use language::{Buffer, LanguageName, LanguageRegistry}; +use lsp::CompletionItemTag; use markdown::{Markdown, MarkdownElement}; use multi_buffer::{Anchor, ExcerptId}; use ordered_float::OrderedFloat; @@ -17,7 +18,6 @@ use project::{CompletionDisplayOptions, CompletionSource}; use task::DebugScenario; use task::TaskContext; -use std::collections::VecDeque; use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; use std::{ @@ -36,18 +36,21 @@ use util::ResultExt; use crate::hover_popover::{hover_markdown_style, open_markdown_url}; use crate::{ - CodeActionProvider, CompletionId, CompletionItemKind, CompletionProvider, DisplayRow, Editor, - EditorStyle, ResolvedTasks, + CodeActionProvider, CompletionId, CompletionProvider, DisplayRow, Editor, EditorStyle, + ResolvedTasks, actions::{ConfirmCodeAction, ConfirmCompletion}, split_words, styled_runs_for_code_label, }; use crate::{CodeActionSource, EditorSettings}; +use collections::{HashSet, VecDeque}; use settings::{Settings, SnippetSortOrder}; pub const MENU_GAP: Pixels = px(4.); pub const MENU_ASIDE_X_PADDING: Pixels = px(16.); pub const MENU_ASIDE_MIN_WIDTH: Pixels = px(260.); pub const MENU_ASIDE_MAX_WIDTH: Pixels = px(500.); +pub const COMPLETION_MENU_MIN_WIDTH: Pixels = px(280.); +pub const COMPLETION_MENU_MAX_WIDTH: Pixels = px(540.); // Constants for the markdown cache. The purpose of this cache is to reduce flickering due to // documentation not yet being parsed. @@ -203,6 +206,13 @@ impl CodeContextMenu { CodeContextMenu::CodeActions(_) => (), } } + + pub fn primary_scroll_handle(&self) -> UniformListScrollHandle { + match self { + CodeContextMenu::Completions(menu) => menu.scroll_handle.clone(), + CodeContextMenu::CodeActions(menu) => menu.scroll_handle.clone(), + } + } } pub enum ContextMenuOrigin { @@ -220,7 +230,9 @@ pub struct CompletionsMenu { pub is_incomplete: bool, pub buffer: Entity, pub completions: Rc>>, - match_candidates: Arc<[StringMatchCandidate]>, + /// String match candidate for each completion, grouped by `match_start`. + match_candidates: Arc<[(Option, Vec)]>, + /// Entries displayed in the menu, which is a filtered and sorted subset of `match_candidates`. 
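Editor's note: the hunk above changes `match_candidates` so that one `StringMatchCandidate` per completion is bucketed by its optional `match_start`, via itertools' `into_group_map_by`. Below is a minimal sketch of that grouping step only; the plain string labels and integer offsets are hypothetical stand-ins for the editor's completion and anchor types, not Zed's real API.

use std::collections::HashMap;

use itertools::Itertools;

fn main() {
    // Hypothetical candidates: a label plus an optional `match_start` offset.
    let candidates = vec![("foo", None), ("bar", Some(2usize)), ("baz", Some(2))];

    // Bucket candidates that share the same `match_start`; completions with no
    // explicit start all land in the `None` group.
    let grouped: HashMap<Option<usize>, Vec<(&str, Option<usize>)>> =
        candidates.into_iter().into_group_map_by(|(_, start)| *start);

    assert_eq!(grouped[&None].len(), 1);
    assert_eq!(grouped[&Some(2)].len(), 2);
}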
pub entries: Rc>>, pub selected_item: usize, filter_task: Task<()>, @@ -298,6 +310,7 @@ impl CompletionsMenu { is_incomplete: bool, buffer: Entity, completions: Box<[Completion]>, + scroll_handle: Option, display_options: CompletionDisplayOptions, snippet_sort_order: SnippetSortOrder, language_registry: Option>, @@ -308,6 +321,8 @@ impl CompletionsMenu { .iter() .enumerate() .map(|(id, completion)| StringMatchCandidate::new(id, completion.label.filter_text())) + .into_group_map_by(|candidate| completions[candidate.id].match_start) + .into_iter() .collect(); let completions_menu = Self { @@ -325,7 +340,7 @@ impl CompletionsMenu { selected_item: 0, filter_task: Task::ready(()), cancel_filter: Arc::new(AtomicBool::new(false)), - scroll_handle: UniformListScrollHandle::new(), + scroll_handle: scroll_handle.unwrap_or_else(UniformListScrollHandle::new), scroll_handle_aside: ScrollHandle::new(), resolve_completions: true, last_rendered_range: RefCell::new(None).into(), @@ -347,6 +362,7 @@ impl CompletionsMenu { choices: &Vec, selection: Range, buffer: Entity, + scroll_handle: Option, snippet_sort_order: SnippetSortOrder, ) -> Self { let completions = choices @@ -355,6 +371,8 @@ impl CompletionsMenu { replace_range: selection.start.text_anchor..selection.end.text_anchor, new_text: choice.to_string(), label: CodeLabel::plain(choice.to_string(), None), + match_start: None, + snippet_deduplication_key: None, icon_path: None, documentation: None, confirm: None, @@ -363,11 +381,14 @@ impl CompletionsMenu { }) .collect(); - let match_candidates = choices - .iter() - .enumerate() - .map(|(id, completion)| StringMatchCandidate::new(id, completion)) - .collect(); + let match_candidates = Arc::new([( + None, + choices + .iter() + .enumerate() + .map(|(id, completion)| StringMatchCandidate::new(id, completion)) + .collect(), + )]); let entries = choices .iter() .enumerate() @@ -392,7 +413,7 @@ impl CompletionsMenu { selected_item: 0, filter_task: Task::ready(()), cancel_filter: Arc::new(AtomicBool::new(false)), - scroll_handle: UniformListScrollHandle::new(), + scroll_handle: scroll_handle.unwrap_or_else(UniformListScrollHandle::new), scroll_handle_aside: ScrollHandle::new(), resolve_completions: false, show_completion_documentation: false, @@ -497,7 +518,7 @@ impl CompletionsMenu { cx: &mut Context, ) { self.scroll_handle - .scroll_to_item(self.selected_item, ScrollStrategy::Top); + .scroll_to_item(self.selected_item, ScrollStrategy::Nearest); if let Some(provider) = provider { let entries = self.entries.borrow(); let entry = if self.selected_item < entries.len() { @@ -823,27 +844,38 @@ impl CompletionsMenu { FontWeight::BOLD.into(), ) }), - styled_runs_for_code_label(&completion.label, &style.syntax).map( - |(range, mut highlight)| { - // Ignore font weight for syntax highlighting, as we'll use it - // for fuzzy matches. - highlight.font_weight = None; - if completion - .source - .lsp_completion(false) - .and_then(|lsp_completion| lsp_completion.deprecated) - .unwrap_or(false) - { - highlight.strikethrough = Some(StrikethroughStyle { - thickness: 1.0.into(), - ..Default::default() - }); - highlight.color = Some(cx.theme().colors().text_muted); - } + styled_runs_for_code_label( + &completion.label, + &style.syntax, + &style.local_player, + ) + .map(|(range, mut highlight)| { + // Ignore font weight for syntax highlighting, as we'll use it + // for fuzzy matches. 
+ highlight.font_weight = None; + if completion + .source + .lsp_completion(false) + .and_then(|lsp_completion| { + match (lsp_completion.deprecated, &lsp_completion.tags) { + (Some(true), _) => Some(true), + (_, Some(tags)) => { + Some(tags.contains(&CompletionItemTag::DEPRECATED)) + } + _ => None, + } + }) + .unwrap_or(false) + { + highlight.strikethrough = Some(StrikethroughStyle { + thickness: 1.0.into(), + ..Default::default() + }); + highlight.color = Some(cx.theme().colors().text_muted); + } - (range, highlight) - }, - ), + (range, highlight) + }), ); let completion_label = StyledText::new(completion.label.text.clone()) @@ -888,33 +920,36 @@ impl CompletionsMenu { }) }); - div().min_w(px(280.)).max_w(px(540.)).child( - ListItem::new(mat.candidate_id) - .inset(true) - .toggle_state(item_ix == selected_item) - .on_click(cx.listener(move |editor, _event, window, cx| { - cx.stop_propagation(); - if let Some(task) = editor.confirm_completion( - &ConfirmCompletion { - item_ix: Some(item_ix), - }, - window, - cx, - ) { - task.detach_and_log_err(cx) - } - })) - .start_slot::(start_slot) - .child(h_flex().overflow_hidden().child(completion_label)) - .end_slot::_point_to__point()` converts a point in co-ordinate space `A` into co-ordinate +//! space `B`. +//! - A [`RowInfo`] iterator (e.g. [`InlayBufferRows`]) and a [`Chunk`] iterator +//! (e.g. [`InlayChunks`]) +//! - A `sync` function (e.g. [`InlayMap::sync`]) that takes a snapshot and list of [`Edit`]s, +//! and returns a new snapshot and a list of transformed [`Edit`]s. Note that the generic +//! parameter on `Edit` changes, since these methods take in edits in the co-ordinate space of +//! the lower layer, and return edits in their own co-ordinate space. The term "edit" is +//! slightly misleading, since an [`Edit`] doesn't tell you what changed - rather it can be +//! thought of as a "region to invalidate". In theory, it would be correct to always use a +//! single edit that covers the entire range. However, this would lead to lots of unnecessary +//! recalculation. +//! +//! See the docs for the [`inlay_map`] module for a more in-depth explanation of how a single layer +//! works. +//! //! [Editor]: crate::Editor //! [EditorElement]: crate::element::EditorElement +//! [`TextSummary`]: multi_buffer::MBTextSummary +//! [`WrapRow`]: wrap_map::WrapRow +//! [`InlayBufferRows`]: inlay_map::InlayBufferRows +//! [`InlayChunks`]: inlay_map::InlayChunks +//! [`Edit`]: text::Edit +//! [`Edit`]: text::Edit +//! 
[`Chunk`]: language::Chunk #[macro_use] mod dimensions; @@ -44,12 +93,10 @@ pub use invisibles::{is_invisible, replacement}; use collections::{HashMap, HashSet}; use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle}; -use language::{ - OffsetUtf16, Point, Subscription as BufferSubscription, language_settings::language_settings, -}; +use language::{Point, Subscription as BufferSubscription, language_settings::language_settings}; use multi_buffer::{ - Anchor, AnchorRangeExt, MultiBuffer, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, - RowInfo, ToOffset, ToPoint, + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, + MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint, }; use project::InlayId; use project::project_settings::DiagnosticSeverity; @@ -58,6 +105,7 @@ use sum_tree::{Bias, TreeMap}; use text::{BufferId, LineIndent}; use ui::{SharedString, px}; use unicode_segmentation::UnicodeSegmentation; +use ztracing::instrument; use std::{ any::TypeId, @@ -104,7 +152,7 @@ type InlayHighlights = TreeMap, - buffer_subscription: BufferSubscription, + buffer_subscription: BufferSubscription, /// Decides where the [`Inlay`]s should be displayed. inlay_map: InlayMap, /// Decides where the fold indicators should be and tracks parts of a source file that are currently folded. @@ -170,6 +218,7 @@ impl DisplayMap { } } + #[instrument(skip_all)] pub fn snapshot(&mut self, cx: &mut Context) -> DisplaySnapshot { let tab_size = Self::tab_size(&self.buffer, cx); @@ -183,6 +232,8 @@ impl DisplayMap { .update(cx, |map, cx| map.sync(tab_snapshot, edits, cx)); let block_snapshot = self.block_map.read(wrap_snapshot, edits).snapshot; + // todo word diff here? + DisplaySnapshot { block_snapshot, diagnostics_max_severity: self.diagnostics_max_severity, @@ -195,10 +246,11 @@ impl DisplayMap { } } + #[instrument(skip_all)] pub fn set_state(&mut self, other: &DisplaySnapshot, cx: &mut Context) { self.fold( other - .folds_in_range(0..other.buffer_snapshot().len()) + .folds_in_range(MultiBufferOffset(0)..other.buffer_snapshot().len()) .map(|fold| { Crease::simple( fold.range.to_offset(other.buffer_snapshot()), @@ -211,6 +263,7 @@ impl DisplayMap { } /// Creates folds for the given creases. + #[instrument(skip_all)] pub fn fold(&mut self, creases: Vec>, cx: &mut Context) { let buffer_snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); @@ -279,6 +332,7 @@ impl DisplayMap { } /// Removes any folds with the given ranges. + #[instrument(skip_all)] pub fn remove_folds_with_type( &mut self, ranges: impl IntoIterator>, @@ -304,6 +358,7 @@ impl DisplayMap { } /// Removes any folds whose ranges intersect any of the given ranges. 
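Editor's note: the module docs above describe each display-map layer as exposing co-ordinate conversions plus a `sync` function that consumes edits expressed in the lower layer's space and returns edits in its own space, where each edit is really a "region to invalidate". The sketch below makes that contract concrete with toy types; none of these names are Zed's real APIs, and a real layer would remap offsets through its own transforms rather than use the identity mapping.

use std::ops::Range;

// Stand-in offset types for two adjacent layers.
#[derive(Clone, Copy)]
struct LowerOffset(u32);
#[derive(Clone, Copy)]
struct UpperOffset(u32);

// An "edit" is just a region to invalidate, expressed in some co-ordinate space.
struct Edit<T> {
    old: Range<T>,
    new: Range<T>,
}

struct LowerSnapshot;
struct UpperSnapshot;

// The upper layer ingests the lower layer's snapshot plus edits in lower co-ordinates,
// and returns its own snapshot plus the same regions re-expressed in upper co-ordinates.
fn sync(
    _lower: LowerSnapshot,
    edits: Vec<Edit<LowerOffset>>,
) -> (UpperSnapshot, Vec<Edit<UpperOffset>>) {
    let remapped = edits
        .into_iter()
        .map(|edit| Edit {
            // A real layer would account for inlays, folds, tabs, soft wraps, or blocks
            // here; the identity mapping keeps the sketch minimal.
            old: UpperOffset(edit.old.start.0)..UpperOffset(edit.old.end.0),
            new: UpperOffset(edit.new.start.0)..UpperOffset(edit.new.end.0),
        })
        .collect();
    (UpperSnapshot, remapped)
}

fn main() {
    let (_snapshot, edits) = sync(
        LowerSnapshot,
        vec![Edit {
            old: LowerOffset(4)..LowerOffset(8),
            new: LowerOffset(4)..LowerOffset(10),
        }],
    );
    assert_eq!(edits.len(), 1);
}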
+ #[instrument(skip_all)] pub fn unfold_intersecting( &mut self, ranges: impl IntoIterator>, @@ -335,6 +390,7 @@ impl DisplayMap { block_map.remove_intersecting_replace_blocks(offset_ranges, inclusive); } + #[instrument(skip_all)] pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); @@ -349,6 +405,7 @@ impl DisplayMap { block_map.disable_header_for_buffer(buffer_id) } + #[instrument(skip_all)] pub fn fold_buffers( &mut self, buffer_ids: impl IntoIterator, @@ -367,6 +424,7 @@ impl DisplayMap { block_map.fold_buffers(buffer_ids, self.buffer.read(cx), cx) } + #[instrument(skip_all)] pub fn unfold_buffers( &mut self, buffer_ids: impl IntoIterator, @@ -385,14 +443,17 @@ impl DisplayMap { block_map.unfold_buffers(buffer_ids, self.buffer.read(cx), cx) } + #[instrument(skip_all)] pub(crate) fn is_buffer_folded(&self, buffer_id: language::BufferId) -> bool { self.block_map.folded_buffers.contains(&buffer_id) } + #[instrument(skip_all)] pub(crate) fn folded_buffers(&self) -> &HashSet { &self.block_map.folded_buffers } + #[instrument(skip_all)] pub fn insert_creases( &mut self, creases: impl IntoIterator>, @@ -402,6 +463,7 @@ impl DisplayMap { self.crease_map.insert(creases, &snapshot) } + #[instrument(skip_all)] pub fn remove_creases( &mut self, crease_ids: impl IntoIterator, @@ -411,6 +473,7 @@ impl DisplayMap { self.crease_map.remove(crease_ids, &snapshot) } + #[instrument(skip_all)] pub fn insert_blocks( &mut self, blocks: impl IntoIterator>, @@ -429,6 +492,7 @@ impl DisplayMap { block_map.insert(blocks) } + #[instrument(skip_all)] pub fn resize_blocks(&mut self, heights: HashMap, cx: &mut Context) { let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); @@ -443,10 +507,12 @@ impl DisplayMap { block_map.resize(heights); } + #[instrument(skip_all)] pub fn replace_blocks(&mut self, renderers: HashMap) { self.block_map.replace_blocks(renderers); } + #[instrument(skip_all)] pub fn remove_blocks(&mut self, ids: HashSet, cx: &mut Context) { let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); @@ -461,6 +527,7 @@ impl DisplayMap { block_map.remove(ids); } + #[instrument(skip_all)] pub fn row_for_block( &mut self, block_id: CustomBlockId, @@ -480,15 +547,35 @@ impl DisplayMap { Some(DisplayRow(block_row.0)) } + #[instrument(skip_all)] pub fn highlight_text( &mut self, key: HighlightKey, ranges: Vec>, style: HighlightStyle, + merge: bool, + cx: &App, ) { - self.text_highlights.insert(key, Arc::new((style, ranges))); + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let to_insert = match self.text_highlights.remove(&key).filter(|_| merge) { + Some(previous) => { + let mut merged_ranges = previous.1.clone(); + for new_range in ranges { + let i = merged_ranges + .binary_search_by(|probe| { + probe.start.cmp(&new_range.start, &multi_buffer_snapshot) + }) + .unwrap_or_else(|i| i); + merged_ranges.insert(i, new_range); + } + Arc::new((style, merged_ranges)) + } + None => Arc::new((style, ranges)), + }; + self.text_highlights.insert(key, to_insert); } + #[instrument(skip_all)] pub(crate) fn highlight_inlays( &mut self, type_id: TypeId, @@ -508,6 +595,7 @@ impl DisplayMap { } } + #[instrument(skip_all)] pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range])> { let highlights = 
self.text_highlights.get(&HighlightKey::Type(type_id))?; Some((highlights.0, &highlights.1)) @@ -520,11 +608,21 @@ impl DisplayMap { self.text_highlights.values() } + #[instrument(skip_all)] pub fn clear_highlights(&mut self, type_id: TypeId) -> bool { let mut cleared = self .text_highlights .remove(&HighlightKey::Type(type_id)) .is_some(); + self.text_highlights.retain(|key, _| { + let retain = if let HighlightKey::TypePlus(key_type_id, _) = key { + key_type_id != &type_id + } else { + true + }; + cleared |= !retain; + retain + }); cleared |= self.inlay_highlights.remove(&type_id).is_some(); cleared } @@ -539,6 +637,7 @@ impl DisplayMap { .update(cx, |map, cx| map.set_wrap_width(width, cx)) } + #[instrument(skip_all)] pub fn update_fold_widths( &mut self, widths: impl IntoIterator, @@ -570,6 +669,7 @@ impl DisplayMap { self.inlay_map.current_inlays() } + #[instrument(skip_all)] pub(crate) fn splice_inlays( &mut self, to_remove: &[InlayId], @@ -599,6 +699,7 @@ impl DisplayMap { self.block_map.read(snapshot, edits); } + #[instrument(skip_all)] fn tab_size(buffer: &Entity, cx: &App) -> NonZeroU32 { let buffer = buffer.read(cx).as_singleton().map(|buffer| buffer.read(cx)); let language = buffer @@ -648,6 +749,7 @@ pub struct HighlightedChunk<'a> { } impl<'a> HighlightedChunk<'a> { + #[instrument(skip_all)] fn highlight_invisibles( self, editor_style: &'a EditorStyle, @@ -794,7 +896,7 @@ impl DisplaySnapshot { } pub fn is_empty(&self) -> bool { - self.buffer_snapshot().len() == 0 + self.buffer_snapshot().len() == MultiBufferOffset(0) } pub fn row_infos(&self, start_row: DisplayRow) -> impl Iterator + '_ { @@ -805,6 +907,7 @@ impl DisplaySnapshot { self.buffer_snapshot().widest_line_number() } + #[instrument(skip_all)] pub fn prev_line_boundary(&self, mut point: MultiBufferPoint) -> (Point, DisplayPoint) { loop { let mut inlay_point = self.inlay_snapshot().to_inlay_point(point); @@ -823,6 +926,7 @@ impl DisplaySnapshot { } } + #[instrument(skip_all)] pub fn next_line_boundary( &self, mut point: MultiBufferPoint, @@ -861,10 +965,11 @@ impl DisplaySnapshot { new_start..new_end } + #[instrument(skip_all)] pub fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint { let inlay_point = self.inlay_snapshot().to_inlay_point(point); let fold_point = self.fold_snapshot().to_fold_point(inlay_point, bias); - let tab_point = self.tab_snapshot().to_tab_point(fold_point); + let tab_point = self.tab_snapshot().fold_point_to_tab_point(fold_point); let wrap_point = self.wrap_snapshot().tab_point_to_wrap_point(tab_point); let block_point = self.block_snapshot.to_block_point(wrap_point); DisplayPoint(block_point) @@ -890,23 +995,31 @@ impl DisplaySnapshot { .anchor_at(point.to_offset(self, bias), bias) } + #[instrument(skip_all)] fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint { let block_point = point.0; let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias); let tab_point = self.wrap_snapshot().to_tab_point(wrap_point); - let fold_point = self.tab_snapshot().to_fold_point(tab_point, bias).0; + let fold_point = self + .tab_snapshot() + .tab_point_to_fold_point(tab_point, bias) + .0; fold_point.to_inlay_point(self.fold_snapshot()) } + #[instrument(skip_all)] pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint { let block_point = point.0; let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias); let tab_point = self.wrap_snapshot().to_tab_point(wrap_point); - 
self.tab_snapshot().to_fold_point(tab_point, bias).0 + self.tab_snapshot() + .tab_point_to_fold_point(tab_point, bias) + .0 } + #[instrument(skip_all)] pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint { - let tab_point = self.tab_snapshot().to_tab_point(fold_point); + let tab_point = self.tab_snapshot().fold_point_to_tab_point(fold_point); let wrap_point = self.wrap_snapshot().tab_point_to_wrap_point(tab_point); let block_point = self.block_snapshot.to_block_point(wrap_point); DisplayPoint(block_point) @@ -917,6 +1030,7 @@ impl DisplaySnapshot { } /// Returns text chunks starting at the given display row until the end of the file + #[instrument(skip_all)] pub fn text_chunks(&self, display_row: DisplayRow) -> impl Iterator { self.block_snapshot .chunks( @@ -929,6 +1043,7 @@ impl DisplaySnapshot { } /// Returns text chunks starting at the end of the given display row in reverse until the start of the file + #[instrument(skip_all)] pub fn reverse_text_chunks(&self, display_row: DisplayRow) -> impl Iterator { (0..=display_row.0).rev().flat_map(move |row| { self.block_snapshot @@ -945,6 +1060,7 @@ impl DisplaySnapshot { }) } + #[instrument(skip_all)] pub fn chunks( &self, display_rows: Range, @@ -963,6 +1079,7 @@ impl DisplaySnapshot { ) } + #[instrument(skip_all)] pub fn highlighted_chunks<'a>( &'a self, display_rows: Range, @@ -1039,6 +1156,7 @@ impl DisplaySnapshot { }) } + #[instrument(skip_all)] pub fn layout_row( &self, display_row: DisplayRow, @@ -1097,9 +1215,10 @@ impl DisplaySnapshot { details: &TextLayoutDetails, ) -> u32 { let layout_line = self.layout_row(display_row, details); - layout_line.index_for_x(x) as u32 + layout_line.closest_index_for_x(x) as u32 } + #[instrument(skip_all)] pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option { point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left)); let chars = self @@ -1133,7 +1252,10 @@ impl DisplaySnapshot { }) } - pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator + '_ { + pub fn buffer_chars_at( + &self, + mut offset: MultiBufferOffset, + ) -> impl Iterator + '_ { self.buffer_snapshot().chars_at(offset).map(move |ch| { let ret = (ch, offset); offset += ch.len_utf8(); @@ -1143,8 +1265,8 @@ impl DisplaySnapshot { pub fn reverse_buffer_chars_at( &self, - mut offset: usize, - ) -> impl Iterator + '_ { + mut offset: MultiBufferOffset, + ) -> impl Iterator + '_ { self.buffer_snapshot() .reversed_chars_at(offset) .map(move |ch| { @@ -1286,6 +1408,7 @@ impl DisplaySnapshot { .unwrap_or(false) } + #[instrument(skip_all)] pub fn crease_for_buffer_row(&self, buffer_row: MultiBufferRow) -> Option> { let start = MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot().line_len(buffer_row)); @@ -1372,6 +1495,7 @@ impl DisplaySnapshot { } #[cfg(any(test, feature = "test-support"))] + #[instrument(skip_all)] pub fn text_highlight_ranges( &self, ) -> Option>)>> { @@ -1381,6 +1505,34 @@ impl DisplaySnapshot { .cloned() } + #[cfg(any(test, feature = "test-support"))] + #[instrument(skip_all)] + pub fn all_text_highlight_ranges( + &self, + ) -> Vec<(gpui::Hsla, Range)> { + use itertools::Itertools; + + let required_type_id = TypeId::of::(); + self.text_highlights + .iter() + .filter(|(key, _)| match key { + HighlightKey::Type(type_id) => type_id == &required_type_id, + HighlightKey::TypePlus(type_id, _) => type_id == &required_type_id, + }) + .map(|(_, value)| value.clone()) + .flat_map(|ranges| { + ranges + .1 + .iter() + .flat_map(|range| { + Some((ranges.0.color?, 
range.to_point(self.buffer_snapshot()))) + }) + .collect::>() + }) + .sorted_by_key(|(_, range)| range.start) + .collect() + } + #[allow(unused)] #[cfg(any(test, feature = "test-support"))] pub(crate) fn inlay_highlights( @@ -1404,6 +1556,7 @@ impl DisplaySnapshot { /// /// This moves by buffer rows instead of display rows, a distinction that is /// important when soft wrapping is enabled. + #[instrument(skip_all)] pub fn start_of_relative_buffer_row(&self, point: DisplayPoint, times: isize) -> DisplayPoint { let start = self.display_point_to_fold_point(point, Bias::Left); let target = start.row() as isize + times; @@ -1526,23 +1679,26 @@ impl DisplayPoint { map.display_point_to_point(self, Bias::Left) } - pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize { + pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> MultiBufferOffset { let wrap_point = map.block_snapshot.to_wrap_point(self.0, bias); let tab_point = map.wrap_snapshot().to_tab_point(wrap_point); - let fold_point = map.tab_snapshot().to_fold_point(tab_point, bias).0; + let fold_point = map + .tab_snapshot() + .tab_point_to_fold_point(tab_point, bias) + .0; let inlay_point = fold_point.to_inlay_point(map.fold_snapshot()); map.inlay_snapshot() .to_buffer_offset(map.inlay_snapshot().to_offset(inlay_point)) } } -impl ToDisplayPoint for usize { +impl ToDisplayPoint for MultiBufferOffset { fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { map.point_to_display_point(self.to_point(map.buffer_snapshot()), Bias::Left) } } -impl ToDisplayPoint for OffsetUtf16 { +impl ToDisplayPoint for MultiBufferOffsetUtf16 { fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { self.to_offset(map.buffer_snapshot()).to_display_point(map) } @@ -1685,7 +1841,7 @@ pub mod tests { let block_properties = (0..rng.random_range(1..=1)) .map(|_| { let position = buffer.anchor_after(buffer.clip_offset( - rng.random_range(0..=buffer.len()), + rng.random_range(MultiBufferOffset(0)..=buffer.len()), Bias::Left, )); @@ -1727,8 +1883,12 @@ pub mod tests { for _ in 0..rng.random_range(1..=3) { buffer.read_with(cx, |buffer, cx| { let buffer = buffer.read(cx); - let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); - let start = buffer.clip_offset(rng.random_range(0..=end), Left); + let end = buffer.clip_offset( + rng.random_range(MultiBufferOffset(0)..=buffer.len()), + Right, + ); + let start = buffer + .clip_offset(rng.random_range(MultiBufferOffset(0)..=end), Left); ranges.push(start..end); }); } @@ -1954,7 +2114,7 @@ pub mod tests { ) ); - let ix = snapshot.buffer_snapshot().text().find("seven").unwrap(); + let ix = MultiBufferOffset(snapshot.buffer_snapshot().text().find("seven").unwrap()); buffer.update(cx, |buffer, cx| { buffer.edit([(ix..ix, "and ")], None, cx); }); @@ -2083,7 +2243,7 @@ pub mod tests { &[], vec![Inlay::edit_prediction( 0, - buffer_snapshot.anchor_after(0), + buffer_snapshot.anchor_after(MultiBufferOffset(0)), "\n", )], cx, @@ -2094,7 +2254,11 @@ pub mod tests { // Regression test: updating the display map does not crash when a // block is immediately followed by a multi-line inlay. 
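The new test helper `all_text_highlight_ranges` filters highlight entries by `TypeId`, flattens their ranges, and orders the result by range start with itertools' `sorted_by_key`. A compiling sketch of that collect-and-sort shape, using plain `u32` colors and `usize` ranges as stand-ins for the real highlight styles and anchors:

```rust
use itertools::Itertools;
use std::ops::Range;

/// Flatten several highlight groups into (color, range) pairs sorted by start.
fn collect_sorted(groups: &[(u32, Vec<Range<usize>>)]) -> Vec<(u32, Range<usize>)> {
    groups
        .iter()
        .flat_map(|(color, ranges)| ranges.iter().map(move |r| (*color, r.clone())))
        .sorted_by_key(|(_, range)| range.start)
        .collect()
}

fn main() {
    let groups = vec![(1u32, vec![5..9, 0..2]), (2u32, vec![3..4])];
    assert_eq!(
        collect_sorted(&groups),
        vec![(1, 0..2), (2, 3..4), (1, 5..9)]
    );
}
```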
buffer.update(cx, |buffer, cx| { - buffer.edit([(1..1, "b")], None, cx); + buffer.edit( + [(MultiBufferOffset(1)..MultiBufferOffset(1), "b")], + None, + cx, + ); }); map.update(cx, |m, cx| assert_eq!(m.snapshot(cx).text(), "\n\n\nab")); } @@ -2378,6 +2542,8 @@ pub mod tests { ..buffer_snapshot.anchor_after(Point::new(3, 18)), ], red.into(), + false, + cx, ); map.insert_blocks( [BlockProperties { @@ -2689,17 +2855,20 @@ pub mod tests { ..Default::default() }; - map.update(cx, |map, _cx| { + map.update(cx, |map, cx| { map.highlight_text( HighlightKey::Type(TypeId::of::()), highlighted_ranges .into_iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) .map(|range| { buffer_snapshot.anchor_before(range.start) ..buffer_snapshot.anchor_before(range.end) }) .collect(), style, + false, + cx, ); }); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 639d2a06579ca16eb938f3d23908e48b702254ef..15bf012cd907da2455c1a2205bcccd363162fd46 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -11,8 +11,8 @@ use collections::{Bound, HashMap, HashSet}; use gpui::{AnyElement, App, EntityId, Pixels, Window}; use language::{Patch, Point}; use multi_buffer::{ - Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, RowInfo, - ToOffset, ToPoint as _, + Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferRow, + MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _, }; use parking_lot::Mutex; use std::{ @@ -164,6 +164,7 @@ impl BlockPlacement { } impl BlockPlacement { + #[ztracing::instrument(skip_all)] fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering { self.start() .cmp(other.start(), buffer) @@ -171,6 +172,7 @@ impl BlockPlacement { .then_with(|| self.tie_break().cmp(&other.tie_break())) } + #[ztracing::instrument(skip_all)] fn to_wrap_row(&self, wrap_snapshot: &WrapSnapshot) -> Option> { let buffer_snapshot = wrap_snapshot.buffer_snapshot(); match self { @@ -474,6 +476,7 @@ pub struct BlockRows<'a> { } impl BlockMap { + #[ztracing::instrument(skip_all)] pub fn new( wrap_snapshot: WrapSnapshot, buffer_header_height: u32, @@ -503,6 +506,7 @@ impl BlockMap { map } + #[ztracing::instrument(skip_all)] pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: WrapPatch) -> BlockMapReader<'_> { self.sync(&wrap_snapshot, edits); *self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone(); @@ -518,13 +522,17 @@ impl BlockMap { } } + #[ztracing::instrument(skip_all)] pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: WrapPatch) -> BlockMapWriter<'_> { self.sync(&wrap_snapshot, edits); *self.wrap_snapshot.borrow_mut() = wrap_snapshot; BlockMapWriter(self) } + #[ztracing::instrument(skip_all, fields(edits = ?edits))] fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: WrapPatch) { + let _timer = zlog::time!("BlockMap::sync").warn_if_gt(std::time::Duration::from_millis(50)); + let buffer = wrap_snapshot.buffer_snapshot(); // Handle changing the last excerpt if it is empty. 
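The block-map changes lean on two observability tools: `#[ztracing::instrument(skip_all)]` spans on hot methods and a `zlog::time!(..).warn_if_gt(..)` guard inside `sync`. Both look like workspace-internal wrappers; the sketch below shows the intended shape using the public `tracing` and `tracing-subscriber` crates plus a hand-rolled RAII threshold timer (the `ThresholdTimer` type is illustrative, not zlog's real API):

```rust
use std::time::{Duration, Instant};
use tracing::{debug_span, instrument, warn};

// `skip_all` records no arguments; the span only marks the function call.
#[instrument(skip_all)]
fn sync(edit_count: usize) {
    // Warn if this scope runs longer than 50ms, mirroring the intent of
    // `zlog::time!("BlockMap::sync").warn_if_gt(..)`.
    let _timer = ThresholdTimer::new("BlockMap::sync", Duration::from_millis(50));

    for i in 0..edit_count {
        // Per-iteration spans, like `ztracing::debug_span!("while edits", ..)`.
        let span = debug_span!("while edits", edit = i);
        let _enter = span.enter();
        // ... work on one edit ...
    }
}

struct ThresholdTimer {
    label: &'static str,
    threshold: Duration,
    started: Instant,
}

impl ThresholdTimer {
    fn new(label: &'static str, threshold: Duration) -> Self {
        Self { label, threshold, started: Instant::now() }
    }
}

impl Drop for ThresholdTimer {
    fn drop(&mut self) {
        let elapsed = self.started.elapsed();
        if elapsed > self.threshold {
            warn!("{} took {:?}", self.label, elapsed);
        }
    }
}

fn main() {
    tracing_subscriber::fmt::init();
    sync(3);
}
```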
@@ -537,7 +545,7 @@ impl BlockMap { { let max_point = wrap_snapshot.max_point(); let edit_start = wrap_snapshot.prev_row_boundary(max_point); - let edit_end = max_point.row() + WrapRow(1); + let edit_end = max_point.row() + WrapRow(1); // this is end of file edits = edits.compose([WrapEdit { old: edit_start..edit_end, new: edit_start..edit_end, @@ -556,7 +564,15 @@ impl BlockMap { let mut blocks_in_edit = Vec::new(); let mut edits = edits.into_iter().peekable(); + let mut inlay_point_cursor = wrap_snapshot.inlay_point_cursor(); + let mut tab_point_cursor = wrap_snapshot.tab_point_cursor(); + let mut fold_point_cursor = wrap_snapshot.fold_point_cursor(); + let mut wrap_point_cursor = wrap_snapshot.wrap_point_cursor(); + while let Some(edit) = edits.next() { + let span = ztracing::debug_span!("while edits", edit = ?edit); + let _enter = span.enter(); + let mut old_start = edit.old.start; let mut new_start = edit.new.start; @@ -615,6 +631,8 @@ impl BlockMap { let mut old_end = edit.old.end; let mut new_end = edit.new.end; loop { + let span = ztracing::debug_span!("decide where edit ends loop"); + let _enter = span.enter(); // Seek to the transform starting at or after the end of the edit cursor.seek(&old_end, Bias::Left); cursor.next(); @@ -686,6 +704,9 @@ impl BlockMap { last_block_ix = end_block_ix; debug_assert!(blocks_in_edit.is_empty()); + // + 8 is chosen arbitrarily to cover some multibuffer headers + blocks_in_edit + .reserve(end_block_ix - start_block_ix + if buffer.is_singleton() { 0 } else { 8 }); blocks_in_edit.extend( self.custom_blocks[start_block_ix..end_block_ix] @@ -694,6 +715,7 @@ impl BlockMap { let placement = block.placement.to_wrap_row(wrap_snapshot)?; if let BlockPlacement::Above(row) = placement && row < new_start + // this will be true more often now { return None; } @@ -704,7 +726,14 @@ impl BlockMap { blocks_in_edit.extend(self.header_and_footer_blocks( buffer, (start_bound, end_bound), - wrap_snapshot, + |point, bias| { + wrap_point_cursor + .map( + tab_point_cursor + .map(fold_point_cursor.map(inlay_point_cursor.map(point), bias)), + ) + .row() + }, )); BlockMap::sort_blocks(&mut blocks_in_edit); @@ -713,6 +742,10 @@ impl BlockMap { // and then insert the block itself. let mut just_processed_folded_buffer = false; for (block_placement, block) in blocks_in_edit.drain(..) { + let span = + ztracing::debug_span!("for block in edits", block_height = block.height()); + let _enter = span.enter(); + let mut summary = TransformSummary { input_rows: WrapRow(0), output_rows: BlockRow(block.height()), @@ -769,6 +802,7 @@ impl BlockMap { *transforms = new_transforms; } + #[ztracing::instrument(skip_all)] pub fn replace_blocks(&mut self, mut renderers: HashMap) { for block in &mut self.custom_blocks { if let Some(render) = renderers.remove(&block.id) { @@ -777,11 +811,13 @@ impl BlockMap { } } + /// Guarantees that `wrap_row_for` is called with points in increasing order. 
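`header_and_footer_blocks` now receives a row-mapping closure instead of a `&WrapSnapshot`, and the new doc comment pins down the contract: the closure is only called with points in increasing order, so each underlying cursor can seek strictly forward. A small sketch of that closure-injection shape, with illustrative types in place of `Point`, `Bias`, and `WrapRow`:

```rust
/// A mapping stage that only supports forward queries, like the display-map
/// point cursors: it remembers the last input and asserts monotonicity.
struct ForwardMapper {
    last_input: u32,
}

impl ForwardMapper {
    fn new() -> Self {
        Self { last_input: 0 }
    }

    fn map(&mut self, input: u32) -> u32 {
        assert!(input >= self.last_input, "inputs must be non-decreasing");
        self.last_input = input;
        input * 2 // stand-in for a real coordinate conversion
    }
}

/// Accept any `FnMut` mapper rather than a concrete snapshot type, mirroring
/// `header_and_footer_blocks(buffer, range, wrap_row_for)`.
fn rows_for_boundaries(boundaries: &[u32], mut wrap_row_for: impl FnMut(u32) -> u32) -> Vec<u32> {
    // Boundaries are visited in order, so the monotonicity contract holds.
    boundaries.iter().map(|&row| wrap_row_for(row)).collect()
}

fn main() {
    let mut stage_a = ForwardMapper::new();
    let mut stage_b = ForwardMapper::new();
    // Compose two stages into one closure, like chaining the inlay, fold, tab,
    // and wrap point cursors in `BlockMap::sync`.
    let rows = rows_for_boundaries(&[1, 3, 7], |row| stage_b.map(stage_a.map(row)));
    assert_eq!(rows, vec![4, 12, 28]);
}
```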
+ #[ztracing::instrument(skip_all)] fn header_and_footer_blocks<'a, R, T>( &'a self, buffer: &'a multi_buffer::MultiBufferSnapshot, range: R, - wrap_snapshot: &'a WrapSnapshot, + mut wrap_row_for: impl 'a + FnMut(Point, Bias) -> WrapRow, ) -> impl Iterator, Block)> + 'a where R: RangeBounds, @@ -792,9 +828,7 @@ impl BlockMap { std::iter::from_fn(move || { loop { let excerpt_boundary = boundaries.next()?; - let wrap_row = wrap_snapshot - .make_wrap_point(Point::new(excerpt_boundary.row.0, 0), Bias::Left) - .row(); + let wrap_row = wrap_row_for(Point::new(excerpt_boundary.row.0, 0), Bias::Left); let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) { (None, next) => Some(next.buffer_id), @@ -826,16 +860,13 @@ impl BlockMap { boundaries.next(); } - - let wrap_end_row = wrap_snapshot - .make_wrap_point( - Point::new( - last_excerpt_end_row.0, - buffer.line_len(last_excerpt_end_row), - ), - Bias::Right, - ) - .row(); + let wrap_end_row = wrap_row_for( + Point::new( + last_excerpt_end_row.0, + buffer.line_len(last_excerpt_end_row), + ), + Bias::Right, + ); return Some(( BlockPlacement::Replace(wrap_row..=wrap_end_row), @@ -869,6 +900,7 @@ impl BlockMap { }) } + #[ztracing::instrument(skip_all)] fn sort_blocks(blocks: &mut Vec<(BlockPlacement, Block)>) { blocks.sort_unstable_by(|(placement_a, block_a), (placement_b, block_b)| { placement_a @@ -935,6 +967,7 @@ impl BlockMap { } } +#[ztracing::instrument(skip(tree, wrap_snapshot))] fn push_isomorphic(tree: &mut SumTree, rows: RowDelta, wrap_snapshot: &WrapSnapshot) { if rows == RowDelta(0) { return; @@ -1005,6 +1038,7 @@ impl DerefMut for BlockMapReader<'_> { } impl BlockMapReader<'_> { + #[ztracing::instrument(skip_all)] pub fn row_for_block(&self, block_id: CustomBlockId) -> Option { let block = self.blocks.iter().find(|block| block.id == block_id)?; let buffer_row = block @@ -1043,6 +1077,7 @@ impl BlockMapReader<'_> { } impl BlockMapWriter<'_> { + #[ztracing::instrument(skip_all)] pub fn insert( &mut self, blocks: impl IntoIterator>, @@ -1109,6 +1144,7 @@ impl BlockMapWriter<'_> { ids } + #[ztracing::instrument(skip_all)] pub fn resize(&mut self, mut heights: HashMap) { let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); let buffer = wrap_snapshot.buffer_snapshot(); @@ -1161,6 +1197,7 @@ impl BlockMapWriter<'_> { self.0.sync(wrap_snapshot, edits); } + #[ztracing::instrument(skip_all)] pub fn remove(&mut self, block_ids: HashSet) { let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); let buffer = wrap_snapshot.buffer_snapshot(); @@ -1206,9 +1243,10 @@ impl BlockMapWriter<'_> { self.0.sync(wrap_snapshot, edits); } + #[ztracing::instrument(skip_all)] pub fn remove_intersecting_replace_blocks( &mut self, - ranges: impl IntoIterator>, + ranges: impl IntoIterator>, inclusive: bool, ) { let wrap_snapshot = self.0.wrap_snapshot.borrow(); @@ -1228,6 +1266,7 @@ impl BlockMapWriter<'_> { self.0.buffers_with_disabled_headers.insert(buffer_id); } + #[ztracing::instrument(skip_all)] pub fn fold_buffers( &mut self, buffer_ids: impl IntoIterator, @@ -1237,6 +1276,7 @@ impl BlockMapWriter<'_> { self.fold_or_unfold_buffers(true, buffer_ids, multi_buffer, cx); } + #[ztracing::instrument(skip_all)] pub fn unfold_buffers( &mut self, buffer_ids: impl IntoIterator, @@ -1246,6 +1286,7 @@ impl BlockMapWriter<'_> { self.fold_or_unfold_buffers(false, buffer_ids, multi_buffer, cx); } + #[ztracing::instrument(skip_all)] fn fold_or_unfold_buffers( &mut self, fold: bool, @@ -1281,9 +1322,10 @@ impl BlockMapWriter<'_> { self.0.sync(&wrap_snapshot, 
edits); } + #[ztracing::instrument(skip_all)] fn blocks_intersecting_buffer_range( &self, - range: Range, + range: Range, inclusive: bool, ) -> &[Arc] { if range.is_empty() && !inclusive { @@ -1315,6 +1357,7 @@ impl BlockMapWriter<'_> { impl BlockSnapshot { #[cfg(test)] + #[ztracing::instrument(skip_all)] pub fn text(&self) -> String { self.chunks( BlockRow(0)..self.transforms.summary().output_rows, @@ -1326,6 +1369,7 @@ impl BlockSnapshot { .collect() } + #[ztracing::instrument(skip_all)] pub(crate) fn chunks<'a>( &'a self, rows: Range, @@ -1367,6 +1411,7 @@ impl BlockSnapshot { } } + #[ztracing::instrument(skip_all)] pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> { let mut cursor = self.transforms.cursor::>(()); cursor.seek(&start_row, Bias::Right); @@ -1388,6 +1433,7 @@ impl BlockSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn blocks_in_range( &self, rows: Range, @@ -1421,6 +1467,7 @@ impl BlockSnapshot { }) } + #[ztracing::instrument(skip_all)] pub(crate) fn sticky_header_excerpt(&self, position: f64) -> Option> { let top_row = position as u32; let mut cursor = self.transforms.cursor::(()); @@ -1444,6 +1491,7 @@ impl BlockSnapshot { None } + #[ztracing::instrument(skip_all)] pub fn block_for_id(&self, block_id: BlockId) -> Option { let buffer = self.wrap_snapshot.buffer_snapshot(); let wrap_point = match block_id { @@ -1480,6 +1528,7 @@ impl BlockSnapshot { None } + #[ztracing::instrument(skip_all)] pub fn max_point(&self) -> BlockPoint { let row = self .transforms @@ -1489,10 +1538,12 @@ impl BlockSnapshot { BlockPoint::new(row, self.line_len(row)) } + #[ztracing::instrument(skip_all)] pub fn longest_row(&self) -> BlockRow { self.transforms.summary().longest_row } + #[ztracing::instrument(skip_all)] pub fn longest_row_in_range(&self, range: Range) -> BlockRow { let mut cursor = self.transforms.cursor::>(()); cursor.seek(&range.start, Bias::Right); @@ -1544,6 +1595,7 @@ impl BlockSnapshot { longest_row } + #[ztracing::instrument(skip_all)] pub(super) fn line_len(&self, row: BlockRow) -> u32 { let (start, _, item) = self.transforms @@ -1563,11 +1615,13 @@ impl BlockSnapshot { } } + #[ztracing::instrument(skip_all)] pub(super) fn is_block_line(&self, row: BlockRow) -> bool { let (_, _, item) = self.transforms.find::((), &row, Bias::Right); item.is_some_and(|t| t.block.is_some()) } + #[ztracing::instrument(skip_all)] pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool { let (_, _, item) = self.transforms.find::((), &row, Bias::Right); let Some(transform) = item else { @@ -1576,6 +1630,7 @@ impl BlockSnapshot { matches!(transform.block, Some(Block::FoldedBuffer { .. 
})) } + #[ztracing::instrument(skip_all)] pub(super) fn is_line_replaced(&self, row: MultiBufferRow) -> bool { let wrap_point = self .wrap_snapshot @@ -1591,6 +1646,7 @@ impl BlockSnapshot { }) } + #[ztracing::instrument(skip_all)] pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { let mut cursor = self.transforms.cursor::>(()); cursor.seek(&BlockRow(point.row), Bias::Right); @@ -1652,6 +1708,7 @@ impl BlockSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { let (start, _, item) = self.transforms.find::, _>( (), @@ -1673,6 +1730,7 @@ impl BlockSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint { let (start, end, item) = self.transforms.find::, _>( (), @@ -1708,6 +1766,7 @@ impl BlockSnapshot { impl BlockChunks<'_> { /// Go to the next transform + #[ztracing::instrument(skip_all)] fn advance(&mut self) { self.input_chunk = Chunk::default(); self.transforms.next(); @@ -1748,6 +1807,7 @@ pub struct StickyHeaderExcerpt<'a> { impl<'a> Iterator for BlockChunks<'a> { type Item = Chunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.output_row >= self.max_output_row { return None; @@ -1847,6 +1907,7 @@ impl<'a> Iterator for BlockChunks<'a> { impl Iterator for BlockRows<'_> { type Item = RowInfo; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.started { self.output_row.0 += 1; @@ -1949,14 +2010,17 @@ impl DerefMut for BlockContext<'_, '_> { } impl CustomBlock { + #[ztracing::instrument(skip_all)] pub fn render(&self, cx: &mut BlockContext) -> AnyElement { self.render.lock()(cx) } + #[ztracing::instrument(skip_all)] pub fn start(&self) -> Anchor { *self.placement.start() } + #[ztracing::instrument(skip_all)] pub fn end(&self) -> Anchor { *self.placement.end() } @@ -2976,7 +3040,7 @@ mod tests { ); } - #[gpui::test(iterations = 100)] + #[gpui::test(iterations = 60)] fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) { cx.update(init_test); @@ -3043,8 +3107,10 @@ mod tests { let block_properties = (0..block_count) .map(|_| { let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone()); - let offset = - buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Left); + let offset = buffer.clip_offset( + rng.random_range(MultiBufferOffset(0)..=buffer.len()), + Bias::Left, + ); let mut min_height = 0; let placement = match rng.random_range(0..3) { 0 => { @@ -3241,11 +3307,23 @@ mod tests { )) })); + let mut inlay_point_cursor = wraps_snapshot.inlay_point_cursor(); + let mut tab_point_cursor = wraps_snapshot.tab_point_cursor(); + let mut fold_point_cursor = wraps_snapshot.fold_point_cursor(); + let mut wrap_point_cursor = wraps_snapshot.wrap_point_cursor(); + // Note that this needs to be synced with the related section in BlockMap::sync expected_blocks.extend(block_map.header_and_footer_blocks( &buffer_snapshot, - 0.., - &wraps_snapshot, + MultiBufferOffset(0).., + |point, bias| { + wrap_point_cursor + .map( + tab_point_cursor + .map(fold_point_cursor.map(inlay_point_cursor.map(point), bias)), + ) + .row() + }, )); BlockMap::sort_blocks(&mut expected_blocks); diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index a68c27886733d34a60ef0ce2ef4006b92b679db9..8f4a3781f4f335f1a3e61ec5a19818661a7c6ea5 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -19,6 
+19,7 @@ pub struct CreaseMap { } impl CreaseMap { + #[ztracing::instrument(skip_all)] pub fn new(snapshot: &MultiBufferSnapshot) -> Self { CreaseMap { snapshot: CreaseSnapshot::new(snapshot), @@ -40,11 +41,13 @@ impl CreaseSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn creases(&self) -> impl Iterator)> { self.creases.iter().map(|item| (item.id, &item.crease)) } /// Returns the first Crease starting on the specified buffer row. + #[ztracing::instrument(skip_all)] pub fn query_row<'a>( &'a self, row: MultiBufferRow, @@ -69,6 +72,7 @@ impl CreaseSnapshot { None } + #[ztracing::instrument(skip_all)] pub fn creases_in_range<'a>( &'a self, range: Range, @@ -95,6 +99,7 @@ impl CreaseSnapshot { }) } + #[ztracing::instrument(skip_all)] pub fn crease_items_with_offsets( &self, snapshot: &MultiBufferSnapshot, @@ -156,6 +161,7 @@ pub struct CreaseMetadata { } impl Crease { + #[ztracing::instrument(skip_all)] pub fn simple(range: Range, placeholder: FoldPlaceholder) -> Self { Crease::Inline { range, @@ -166,6 +172,7 @@ impl Crease { } } + #[ztracing::instrument(skip_all)] pub fn block(range: Range, height: u32, style: BlockStyle, render: RenderBlock) -> Self { Self::Block { range, @@ -177,6 +184,7 @@ impl Crease { } } + #[ztracing::instrument(skip_all)] pub fn inline( range: Range, placeholder: FoldPlaceholder, @@ -216,6 +224,7 @@ impl Crease { } } + #[ztracing::instrument(skip_all)] pub fn with_metadata(self, metadata: CreaseMetadata) -> Self { match self { Crease::Inline { @@ -235,6 +244,7 @@ impl Crease { } } + #[ztracing::instrument(skip_all)] pub fn range(&self) -> &Range { match self { Crease::Inline { range, .. } => range, @@ -242,6 +252,7 @@ impl Crease { } } + #[ztracing::instrument(skip_all)] pub fn metadata(&self) -> Option<&CreaseMetadata> { match self { Self::Inline { metadata, .. } => metadata.as_ref(), @@ -287,6 +298,7 @@ impl CreaseMap { self.snapshot.clone() } + #[ztracing::instrument(skip_all)] pub fn insert( &mut self, creases: impl IntoIterator>, @@ -312,6 +324,7 @@ impl CreaseMap { new_ids } + #[ztracing::instrument(skip_all)] pub fn remove( &mut self, ids: impl IntoIterator, @@ -379,6 +392,7 @@ impl sum_tree::Summary for ItemSummary { impl sum_tree::Item for CreaseItem { type Summary = ItemSummary; + #[ztracing::instrument(skip_all)] fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { ItemSummary { range: self.crease.range().clone(), @@ -388,12 +402,14 @@ impl sum_tree::Item for CreaseItem { /// Implements `SeekTarget` for `Range` to enable seeking within a `SumTree` of `CreaseItem`s. 
impl SeekTarget<'_, ItemSummary, ItemSummary> for Range { + #[ztracing::instrument(skip_all)] fn cmp(&self, cursor_location: &ItemSummary, snapshot: &MultiBufferSnapshot) -> Ordering { AnchorRangeExt::cmp(self, &cursor_location.range, snapshot) } } impl SeekTarget<'_, ItemSummary, ItemSummary> for Anchor { + #[ztracing::instrument(skip_all)] fn cmp(&self, other: &ItemSummary, snapshot: &MultiBufferSnapshot) -> Ordering { self.cmp(&other.range.start, snapshot) } @@ -461,6 +477,7 @@ mod test { } #[gpui::test] + #[ztracing::instrument(skip_all)] fn test_creases_in_range(cx: &mut App) { let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; let buffer = MultiBuffer::build_simple(text, cx); diff --git a/crates/editor/src/display_map/custom_highlights.rs b/crates/editor/src/display_map/custom_highlights.rs index c6b22bb0b8247420200c2bb8d9e22f55d638386d..1ece2493e3228536999036a32959a6228f0f7cd1 100644 --- a/crates/editor/src/display_map/custom_highlights.rs +++ b/crates/editor/src/display_map/custom_highlights.rs @@ -1,7 +1,7 @@ use collections::BTreeMap; use gpui::HighlightStyle; use language::Chunk; -use multi_buffer::{MultiBufferChunks, MultiBufferSnapshot, ToOffset as _}; +use multi_buffer::{MultiBufferChunks, MultiBufferOffset, MultiBufferSnapshot, ToOffset as _}; use std::{ cmp, iter::{self, Peekable}, @@ -14,7 +14,7 @@ use crate::display_map::{HighlightKey, TextHighlights}; pub struct CustomHighlightsChunks<'a> { buffer_chunks: MultiBufferChunks<'a>, buffer_chunk: Option>, - offset: usize, + offset: MultiBufferOffset, multibuffer_snapshot: &'a MultiBufferSnapshot, highlight_endpoints: Peekable>, @@ -24,14 +24,15 @@ pub struct CustomHighlightsChunks<'a> { #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct HighlightEndpoint { - offset: usize, + offset: MultiBufferOffset, tag: HighlightKey, style: Option, } impl<'a> CustomHighlightsChunks<'a> { + #[ztracing::instrument(skip_all)] pub fn new( - range: Range, + range: Range, language_aware: bool, text_highlights: Option<&'a TextHighlights>, multibuffer_snapshot: &'a MultiBufferSnapshot, @@ -40,7 +41,6 @@ impl<'a> CustomHighlightsChunks<'a> { buffer_chunks: multibuffer_snapshot.chunks(range.clone(), language_aware), buffer_chunk: None, offset: range.start, - text_highlights, highlight_endpoints: create_highlight_endpoints( &range, @@ -52,7 +52,8 @@ impl<'a> CustomHighlightsChunks<'a> { } } - pub fn seek(&mut self, new_range: Range) { + #[ztracing::instrument(skip_all)] + pub fn seek(&mut self, new_range: Range) { self.highlight_endpoints = create_highlight_endpoints(&new_range, self.text_highlights, self.multibuffer_snapshot); self.offset = new_range.start; @@ -63,7 +64,7 @@ impl<'a> CustomHighlightsChunks<'a> { } fn create_highlight_endpoints( - range: &Range, + range: &Range, text_highlights: Option<&TextHighlights>, buffer: &MultiBufferSnapshot, ) -> iter::Peekable> { @@ -75,22 +76,18 @@ fn create_highlight_endpoints( let style = text_highlights.0; let ranges = &text_highlights.1; - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe.end.cmp(&start, buffer); - if cmp.is_gt() { - cmp::Ordering::Greater - } else { - cmp::Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; + let start_ix = ranges + .binary_search_by(|probe| probe.end.cmp(&start, buffer).then(cmp::Ordering::Less)) + .unwrap_or_else(|i| i); + let end_ix = ranges[start_ix..] + .binary_search_by(|probe| { + probe.start.cmp(&end, buffer).then(cmp::Ordering::Greater) + }) + .unwrap_or_else(|i| i); - for range in &ranges[start_ix..] 
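`create_highlight_endpoints` replaces the hand-rolled comparator and linear scan with two `binary_search_by` calls whose comparators never return `Equal` (mapping it to `Less` or `Greater`), so `Err(i)` is always the partition point. The same trick on plain `usize` ranges, assuming they are sorted and non-overlapping:

```rust
use std::cmp::Ordering;
use std::ops::Range;

/// Find the slice of `ranges` (sorted, non-overlapping) that intersects `query`.
fn intersecting(ranges: &[Range<usize>], query: Range<usize>) -> &[Range<usize>] {
    // First range whose end is strictly greater than `query.start`: mapping
    // Equal to Less treats a range ending exactly at the start as before the query.
    let start_ix = ranges
        .binary_search_by(|probe| probe.end.cmp(&query.start).then(Ordering::Less))
        .unwrap_or_else(|i| i);
    // Within the tail, the number of ranges starting before `query.end`: mapping
    // Equal to Greater treats a range starting exactly at the end as outside.
    let len = ranges[start_ix..]
        .binary_search_by(|probe| probe.start.cmp(&query.end).then(Ordering::Greater))
        .unwrap_or_else(|i| i);
    &ranges[start_ix..][..len]
}

fn main() {
    let ranges = vec![0..2, 3..5, 6..9, 10..12];
    assert_eq!(intersecting(&ranges, 4..10), &[3..5, 6..9]);
    assert_eq!(intersecting(&ranges, 12..20), &[] as &[Range<usize>]);
}
```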
{ - if range.start.cmp(&end, buffer).is_ge() { - break; - } + highlight_endpoints.reserve(2 * end_ix); + for range in &ranges[start_ix..][..end_ix] { let start = range.start.to_offset(buffer); let end = range.end.to_offset(buffer); if start == end { @@ -116,8 +113,9 @@ fn create_highlight_endpoints( impl<'a> Iterator for CustomHighlightsChunks<'a> { type Item = Chunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { - let mut next_highlight_endpoint = usize::MAX; + let mut next_highlight_endpoint = MultiBufferOffset(usize::MAX); while let Some(endpoint) = self.highlight_endpoints.peek().copied() { if endpoint.offset <= self.offset { if let Some(style) = endpoint.style { @@ -224,20 +222,22 @@ mod tests { let range_count = rng.random_range(1..10); let text = buffer_snapshot.text(); for _ in 0..range_count { - if buffer_snapshot.len() == 0 { + if buffer_snapshot.len() == MultiBufferOffset(0) { continue; } - let mut start = rng.random_range(0..=buffer_snapshot.len().saturating_sub(10)); + let mut start = rng.random_range( + MultiBufferOffset(0)..=buffer_snapshot.len().saturating_sub_usize(10), + ); - while !text.is_char_boundary(start) { - start = start.saturating_sub(1); + while !text.is_char_boundary(start.0) { + start = start.saturating_sub_usize(1); } - let end_end = buffer_snapshot.len().min(start + 100); + let end_end = buffer_snapshot.len().min(start + 100usize); let mut end = rng.random_range(start..=end_end); - while !text.is_char_boundary(end) { - end = end.saturating_sub(1); + while !text.is_char_boundary(end.0) { + end = end.saturating_sub_usize(1); } if start < end { @@ -253,8 +253,12 @@ mod tests { } // Get all chunks and verify their bitmaps - let chunks = - CustomHighlightsChunks::new(0..buffer_snapshot.len(), false, None, &buffer_snapshot); + let chunks = CustomHighlightsChunks::new( + MultiBufferOffset(0)..buffer_snapshot.len(), + false, + None, + &buffer_snapshot, + ); for chunk in chunks { let chunk_text = chunk.text; diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 4a628f866807aa9b1a1edd45fb9714a5fcc3d5d3..bb0d6885acc2afd95e97fe9121acd2d0580554f3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -5,16 +5,17 @@ use super::{ inlay_map::{InlayBufferRows, InlayChunks, InlayEdit, InlayOffset, InlayPoint, InlaySnapshot}, }; use gpui::{AnyElement, App, ElementId, HighlightStyle, Pixels, Window}; -use language::{Edit, HighlightId, Point, TextSummary}; +use language::{Edit, HighlightId, Point}; use multi_buffer::{ - Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, + Anchor, AnchorRangeExt, MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, + RowInfo, ToOffset, }; use project::InlayId; use std::{ any::TypeId, cmp::{self, Ordering}, fmt, iter, - ops::{Add, AddAssign, Deref, DerefMut, Range, Sub}, + ops::{Add, AddAssign, Deref, DerefMut, Range, Sub, SubAssign}, sync::Arc, usize, }; @@ -98,6 +99,7 @@ impl FoldPoint { &mut self.0.column } + #[ztracing::instrument(skip_all)] pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { let (start, _, _) = snapshot .transforms @@ -106,6 +108,7 @@ impl FoldPoint { InlayPoint(start.1.0 + overshoot) } + #[ztracing::instrument(skip_all)] pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { let (start, _, item) = snapshot .transforms @@ -137,6 +140,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { pub(crate) struct 
FoldMapWriter<'a>(&'a mut FoldMap); impl FoldMapWriter<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn fold( &mut self, ranges: impl IntoIterator, FoldPlaceholder)>, @@ -201,6 +205,7 @@ impl FoldMapWriter<'_> { } /// Removes any folds with the given ranges. + #[ztracing::instrument(skip_all)] pub(crate) fn remove_folds( &mut self, ranges: impl IntoIterator>, @@ -214,6 +219,7 @@ impl FoldMapWriter<'_> { } /// Removes any folds whose ranges intersect the given ranges. + #[ztracing::instrument(skip_all)] pub(crate) fn unfold_intersecting( &mut self, ranges: impl IntoIterator>, @@ -224,6 +230,7 @@ impl FoldMapWriter<'_> { /// Removes any folds that intersect the given ranges and for which the given predicate /// returns true. + #[ztracing::instrument(skip_all)] fn remove_folds_with( &mut self, ranges: impl IntoIterator>, @@ -261,7 +268,7 @@ impl FoldMapWriter<'_> { fold_ixs_to_delete.dedup(); self.0.snapshot.folds = { - let mut cursor = self.0.snapshot.folds.cursor::(buffer); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); let mut folds = SumTree::new(buffer); for fold_ix in fold_ixs_to_delete { folds.append(cursor.slice(&fold_ix, Bias::Right), buffer); @@ -276,6 +283,7 @@ impl FoldMapWriter<'_> { (self.0.snapshot.clone(), edits) } + #[ztracing::instrument(skip_all)] pub(crate) fn update_fold_widths( &mut self, new_widths: impl IntoIterator, @@ -325,6 +333,7 @@ pub struct FoldMap { } impl FoldMap { + #[ztracing::instrument(skip_all)] pub fn new(inlay_snapshot: InlaySnapshot) -> (Self, FoldSnapshot) { let this = Self { snapshot: FoldSnapshot { @@ -349,6 +358,7 @@ impl FoldMap { (this, snapshot) } + #[ztracing::instrument(skip_all)] pub fn read( &mut self, inlay_snapshot: InlaySnapshot, @@ -359,6 +369,7 @@ impl FoldMap { (self.snapshot.clone(), edits) } + #[ztracing::instrument(skip_all)] pub(crate) fn write( &mut self, inlay_snapshot: InlaySnapshot, @@ -368,6 +379,7 @@ impl FoldMap { (FoldMapWriter(self), snapshot, edits) } + #[ztracing::instrument(skip_all)] fn check_invariants(&self) { if cfg!(test) { assert_eq!( @@ -397,6 +409,7 @@ impl FoldMap { } } + #[ztracing::instrument(skip_all)] fn sync( &mut self, inlay_snapshot: InlaySnapshot, @@ -413,7 +426,7 @@ impl FoldMap { let mut new_transforms = SumTree::::default(); let mut cursor = self.snapshot.transforms.cursor::(()); - cursor.seek(&InlayOffset(0), Bias::Right); + cursor.seek(&InlayOffset(MultiBufferOffset(0)), Bias::Right); while let Some(mut edit) = inlay_edits_iter.next() { if let Some(item) = cursor.item() @@ -436,7 +449,7 @@ impl FoldMap { cursor.seek(&edit.old.end, Bias::Right); cursor.next(); - let mut delta = edit.new_len().0 as isize - edit.old_len().0 as isize; + let mut delta = edit.new_len() as isize - edit.old_len() as isize; loop { edit.old.end = *cursor.start(); @@ -446,7 +459,7 @@ impl FoldMap { } let next_edit = inlay_edits_iter.next().unwrap(); - delta += next_edit.new_len().0 as isize - next_edit.old_len().0 as isize; + delta += next_edit.new_len() as isize - next_edit.old_len() as isize; if next_edit.old.end >= edit.old.end { edit.old.end = next_edit.old.end; @@ -458,8 +471,9 @@ impl FoldMap { } } - edit.new.end = - InlayOffset(((edit.new.start + edit.old_len()).0 as isize + delta) as usize); + edit.new.end = InlayOffset(MultiBufferOffset( + ((edit.new.start + edit.old_len()).0.0 as isize + delta) as usize, + )); let anchor = inlay_snapshot .buffer @@ -522,7 +536,7 @@ impl FoldMap { new_transforms.push( Transform { summary: TransformSummary { - output: TextSummary::from(ELLIPSIS), + output: 
MBTextSummary::from(ELLIPSIS), input: inlay_snapshot .text_summary_for_range(fold_range.start..fold_range.end), }, @@ -579,7 +593,7 @@ impl FoldMap { edit.old.start = old_transforms.start().0; } let old_start = - old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0).0; + old_transforms.start().1.0 + (edit.old.start - old_transforms.start().0); old_transforms.seek_forward(&edit.old.end, Bias::Right); if old_transforms.item().is_some_and(|t| t.is_fold()) { @@ -587,14 +601,14 @@ impl FoldMap { edit.old.end = old_transforms.start().0; } let old_end = - old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0).0; + old_transforms.start().1.0 + (edit.old.end - old_transforms.start().0); new_transforms.seek(&edit.new.start, Bias::Left); if new_transforms.item().is_some_and(|t| t.is_fold()) { edit.new.start = new_transforms.start().0; } let new_start = - new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0).0; + new_transforms.start().1.0 + (edit.new.start - new_transforms.start().0); new_transforms.seek_forward(&edit.new.end, Bias::Right); if new_transforms.item().is_some_and(|t| t.is_fold()) { @@ -602,7 +616,7 @@ impl FoldMap { edit.new.end = new_transforms.start().0; } let new_end = - new_transforms.start().1.0 + (edit.new.end - new_transforms.start().0).0; + new_transforms.start().1.0 + (edit.new.end - new_transforms.start().0); fold_edits.push(FoldEdit { old: FoldOffset(old_start)..FoldOffset(old_end), @@ -643,15 +657,20 @@ impl FoldSnapshot { &self.inlay_snapshot.buffer } + #[ztracing::instrument(skip_all)] fn fold_width(&self, fold_id: &FoldId) -> Option { self.fold_metadata_by_id.get(fold_id)?.width } #[cfg(test)] pub fn text(&self) -> String { - self.chunks(FoldOffset(0)..self.len(), false, Highlights::default()) - .map(|c| c.text) - .collect() + self.chunks( + FoldOffset(MultiBufferOffset(0))..self.len(), + false, + Highlights::default(), + ) + .map(|c| c.text) + .collect() } #[cfg(test)] @@ -659,8 +678,9 @@ impl FoldSnapshot { self.folds.items(&self.inlay_snapshot.buffer).len() } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary { - let mut summary = TextSummary::default(); + #[ztracing::instrument(skip_all)] + pub fn text_summary_for_range(&self, range: Range) -> MBTextSummary { + let mut summary = MBTextSummary::default(); let mut cursor = self .transforms @@ -670,7 +690,7 @@ impl FoldSnapshot { let start_in_transform = range.start.0 - cursor.start().0.0; let end_in_transform = cmp::min(range.end, cursor.end().0).0 - cursor.start().0.0; if let Some(placeholder) = transform.placeholder.as_ref() { - summary = TextSummary::from( + summary = MBTextSummary::from( &placeholder.text [start_in_transform.column as usize..end_in_transform.column as usize], ); @@ -689,14 +709,14 @@ impl FoldSnapshot { if range.end > cursor.end().0 { cursor.next(); - summary += &cursor + summary += cursor .summary::<_, TransformSummary>(&range.end, Bias::Right) .output; if let Some(transform) = cursor.item() { let end_in_transform = range.end.0 - cursor.start().0.0; if let Some(placeholder) = transform.placeholder.as_ref() { summary += - TextSummary::from(&placeholder.text[..end_in_transform.column as usize]); + MBTextSummary::from(&placeholder.text[..end_in_transform.column as usize]); } else { let inlay_start = self.inlay_snapshot.to_offset(cursor.start().1); let inlay_end = self @@ -712,6 +732,7 @@ impl FoldSnapshot { summary } + #[ztracing::instrument(skip_all)] pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { let 
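`text_summary_for_range` now accumulates an `MBTextSummary` by value, building partial summaries from placeholder text with `From<&str>` and merging them with `+=`. A self-contained sketch of such an additive summary, tracking just byte length, newline count, and the final line's length (a simplification of the real summary):

```rust
use std::ops::AddAssign;

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct TextSummary {
    len: usize,
    lines: u32,           // number of newlines
    last_line_len: usize, // bytes on the final (possibly empty) line
}

impl From<&str> for TextSummary {
    fn from(text: &str) -> Self {
        let lines = text.matches('\n').count() as u32;
        let last_line_len = text.rsplit('\n').next().unwrap_or("").len();
        Self { len: text.len(), lines, last_line_len }
    }
}

impl AddAssign for TextSummary {
    fn add_assign(&mut self, other: Self) {
        self.len += other.len;
        // If the right-hand text starts new lines, its last line replaces ours;
        // otherwise it extends our last line.
        if other.lines > 0 {
            self.last_line_len = other.last_line_len;
        } else {
            self.last_line_len += other.last_line_len;
        }
        self.lines += other.lines;
    }
}

fn main() {
    let mut summary = TextSummary::from("ab\ncd");
    summary += TextSummary::from("ef\ng");
    assert_eq!(summary, TextSummary::from("ab\ncdef\ng"));
}
```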
(start, end, item) = self .transforms @@ -728,10 +749,20 @@ impl FoldSnapshot { } } + #[ztracing::instrument(skip_all)] + pub fn fold_point_cursor(&self) -> FoldPointCursor<'_> { + let cursor = self + .transforms + .cursor::>(()); + FoldPointCursor { cursor } + } + + #[ztracing::instrument(skip_all)] pub fn len(&self) -> FoldOffset { FoldOffset(self.transforms.summary().output.len) } + #[ztracing::instrument(skip_all)] pub fn line_len(&self, row: u32) -> u32 { let line_start = FoldPoint::new(row, 0).to_offset(self).0; let line_end = if row >= self.max_point().row() { @@ -742,6 +773,7 @@ impl FoldSnapshot { (line_end - line_start) as u32 } + #[ztracing::instrument(skip_all)] pub fn row_infos(&self, start_row: u32) -> FoldRows<'_> { if start_row > self.transforms.summary().output.lines.row { panic!("invalid display row {}", start_row); @@ -764,6 +796,7 @@ impl FoldSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn max_point(&self) -> FoldPoint { FoldPoint(self.transforms.summary().output.lines) } @@ -773,6 +806,7 @@ impl FoldSnapshot { self.transforms.summary().output.longest_row } + #[ztracing::instrument(skip_all)] pub fn folds_in_range(&self, range: Range) -> impl Iterator where T: ToOffset, @@ -787,6 +821,7 @@ impl FoldSnapshot { }) } + #[ztracing::instrument(skip_all)] pub fn intersects_fold(&self, offset: T) -> bool where T: ToOffset, @@ -799,6 +834,7 @@ impl FoldSnapshot { item.is_some_and(|t| t.placeholder.is_some()) } + #[ztracing::instrument(skip_all)] pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool { let mut inlay_point = self .inlay_snapshot @@ -827,6 +863,7 @@ impl FoldSnapshot { } } + #[ztracing::instrument(skip_all)] pub(crate) fn chunks<'a>( &'a self, range: Range, @@ -839,8 +876,8 @@ impl FoldSnapshot { transform_cursor.seek(&range.start, Bias::Right); let inlay_start = { - let overshoot = range.start.0 - transform_cursor.start().0.0; - transform_cursor.start().1 + InlayOffset(overshoot) + let overshoot = range.start - transform_cursor.start().0; + transform_cursor.start().1 + overshoot }; let transform_end = transform_cursor.end(); @@ -851,8 +888,8 @@ impl FoldSnapshot { { inlay_start } else if range.end < transform_end.0 { - let overshoot = range.end.0 - transform_cursor.start().0.0; - transform_cursor.start().1 + InlayOffset(overshoot) + let overshoot = range.end - transform_cursor.start().0; + transform_cursor.start().1 + overshoot } else { transform_end.1 }; @@ -871,6 +908,7 @@ impl FoldSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator { self.chunks( start.to_offset(self)..self.len(), @@ -880,6 +918,7 @@ impl FoldSnapshot { .flat_map(|chunk| chunk.text.chars()) } + #[ztracing::instrument(skip_all)] pub fn chunks_at(&self, start: FoldPoint) -> FoldChunks<'_> { self.chunks( start.to_offset(self)..self.len(), @@ -889,6 +928,7 @@ impl FoldSnapshot { } #[cfg(test)] + #[ztracing::instrument(skip_all)] pub fn clip_offset(&self, offset: FoldOffset, bias: Bias) -> FoldOffset { if offset > self.len() { self.len() @@ -897,6 +937,7 @@ impl FoldSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { let (start, end, item) = self .transforms @@ -921,7 +962,33 @@ impl FoldSnapshot { } } -fn push_isomorphic(transforms: &mut SumTree, summary: TextSummary) { +pub struct FoldPointCursor<'transforms> { + cursor: Cursor<'transforms, 'static, Transform, Dimensions>, +} + +impl FoldPointCursor<'_> { + #[ztracing::instrument(skip_all)] + pub 
fn map(&mut self, point: InlayPoint, bias: Bias) -> FoldPoint { + let cursor = &mut self.cursor; + if cursor.did_seek() { + cursor.seek_forward(&point, Bias::Right); + } else { + cursor.seek(&point, Bias::Right); + } + if cursor.item().is_some_and(|t| t.is_fold()) { + if bias == Bias::Left || point == cursor.start().0 { + cursor.start().1 + } else { + cursor.end().1 + } + } else { + let overshoot = point.0 - cursor.start().0.0; + FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0)) + } + } +} + +fn push_isomorphic(transforms: &mut SumTree, summary: MBTextSummary) { let mut did_merge = false; transforms.update_last( |last| { @@ -950,13 +1017,13 @@ fn push_isomorphic(transforms: &mut SumTree, summary: TextSummary) { fn intersecting_folds<'a>( inlay_snapshot: &'a InlaySnapshot, folds: &'a SumTree, - range: Range, + range: Range, inclusive: bool, -) -> FilterCursor<'a, 'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, usize> { +) -> FilterCursor<'a, 'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, MultiBufferOffset> { let buffer = &inlay_snapshot.buffer; let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - let mut cursor = folds.filter::<_, usize>(buffer, move |summary| { + let mut cursor = folds.filter::<_, MultiBufferOffset>(buffer, move |summary| { let start_cmp = start.cmp(&summary.max_end, buffer); let end_cmp = end.cmp(&summary.min_start, buffer); @@ -1061,8 +1128,8 @@ impl Transform { #[derive(Clone, Debug, Default, Eq, PartialEq)] struct TransformSummary { - output: TextSummary, - input: TextSummary, + output: MBTextSummary, + input: MBTextSummary, } impl sum_tree::Item for Transform { @@ -1079,8 +1146,8 @@ impl sum_tree::ContextLessSummary for TransformSummary { } fn add_summary(&mut self, other: &Self) { - self.input += &other.input; - self.output += &other.output; + self.input += other.input; + self.output += other.output; } } @@ -1211,7 +1278,7 @@ impl sum_tree::SeekTarget<'_, FoldSummary, FoldRange> for FoldRange { } } -impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { +impl<'a> sum_tree::Dimension<'a, FoldSummary> for MultiBufferOffset { fn zero(_cx: &MultiBufferSnapshot) -> Self { Default::default() } @@ -1229,6 +1296,7 @@ pub struct FoldRows<'a> { } impl FoldRows<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn seek(&mut self, row: u32) { let fold_point = FoldPoint::new(row, 0); self.cursor.seek(&fold_point, Bias::Left); @@ -1242,6 +1310,7 @@ impl FoldRows<'_> { impl Iterator for FoldRows<'_> { type Item = RowInfo; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { let mut traversed_fold = false; while self.fold_point > self.cursor.end().0 { @@ -1353,12 +1422,13 @@ pub struct FoldChunks<'a> { } impl FoldChunks<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn seek(&mut self, range: Range) { self.transform_cursor.seek(&range.start, Bias::Right); let inlay_start = { - let overshoot = range.start.0 - self.transform_cursor.start().0.0; - self.transform_cursor.start().1 + InlayOffset(overshoot) + let overshoot = range.start - self.transform_cursor.start().0; + self.transform_cursor.start().1 + overshoot }; let transform_end = self.transform_cursor.end(); @@ -1370,8 +1440,8 @@ impl FoldChunks<'_> { { inlay_start } else if range.end < transform_end.0 { - let overshoot = range.end.0 - self.transform_cursor.start().0.0; - self.transform_cursor.start().1 + InlayOffset(overshoot) + let overshoot = range.end - self.transform_cursor.start().0; + 
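`FoldPointCursor::map` keeps its sum-tree cursor between calls, issuing `seek_forward` once the first `seek` has happened, so a monotone stream of queries costs one forward pass instead of a fresh search per point. A standalone sketch of that amortization over a flat run table, standing in for the transform sum tree:

```rust
/// One isomorphic run: `len` units of input map 1:1 onto output starting at
/// these coordinates.
#[derive(Clone, Copy)]
struct Run {
    input_start: usize,
    output_start: usize,
    len: usize,
}

/// A forward-only mapper in the spirit of `FoldPointCursor`: it keeps its
/// position between calls, so a monotone sequence of queries is O(n) total
/// instead of O(n) per query.
struct RunCursor<'a> {
    runs: &'a [Run],
    ix: usize,
}

impl<'a> RunCursor<'a> {
    fn new(runs: &'a [Run]) -> Self {
        Self { runs, ix: 0 }
    }

    /// Map `input` to the output coordinate space. Inputs must be non-decreasing
    /// and `runs` non-empty.
    fn map(&mut self, input: usize) -> usize {
        // Like `seek_forward`: advance from wherever the previous call stopped.
        while self.ix + 1 < self.runs.len() && input >= self.runs[self.ix + 1].input_start {
            self.ix += 1;
        }
        let run = self.runs[self.ix];
        // Clamp the overshoot to the run, as the real cursor clamps to `cursor.end()`.
        let overshoot = (input - run.input_start).min(run.len);
        run.output_start + overshoot
    }
}

fn main() {
    // Input 0..4 maps to output 0..4; input 4..10 maps to output 7..13,
    // as if three output-only units were inserted in between.
    let runs = [
        Run { input_start: 0, output_start: 0, len: 4 },
        Run { input_start: 4, output_start: 7, len: 6 },
    ];
    let mut cursor = RunCursor::new(&runs);
    assert_eq!(cursor.map(2), 2);
    assert_eq!(cursor.map(4), 7);
    assert_eq!(cursor.map(9), 12);
}
```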
self.transform_cursor.start().1 + overshoot } else { transform_end.1 }; @@ -1387,6 +1457,7 @@ impl FoldChunks<'_> { impl<'a> Iterator for FoldChunks<'a> { type Item = Chunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.output_offset >= self.max_output_offset { return None; @@ -1423,8 +1494,8 @@ impl<'a> Iterator for FoldChunks<'a> { let transform_start = self.transform_cursor.start(); let transform_end = self.transform_cursor.end(); let inlay_end = if self.max_output_offset < transform_end.0 { - let overshoot = self.max_output_offset.0 - transform_start.0.0; - transform_start.1 + InlayOffset(overshoot) + let overshoot = self.max_output_offset - transform_start.0; + transform_start.1 + overshoot } else { transform_end.1 }; @@ -1441,15 +1512,15 @@ impl<'a> Iterator for FoldChunks<'a> { // Otherwise, take a chunk from the buffer's text. if let Some((buffer_chunk_start, mut inlay_chunk)) = self.inlay_chunk.clone() { let chunk = &mut inlay_chunk.chunk; - let buffer_chunk_end = buffer_chunk_start + InlayOffset(chunk.text.len()); + let buffer_chunk_end = buffer_chunk_start + chunk.text.len(); let transform_end = self.transform_cursor.end().1; let chunk_end = buffer_chunk_end.min(transform_end); - let bit_start = (self.inlay_offset - buffer_chunk_start).0; - let bit_end = (chunk_end - buffer_chunk_start).0; + let bit_start = self.inlay_offset - buffer_chunk_start; + let bit_end = chunk_end - buffer_chunk_start; chunk.text = &chunk.text[bit_start..bit_end]; - let bit_end = (chunk_end - buffer_chunk_start).0; + let bit_end = chunk_end - buffer_chunk_start; let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1); chunk.tabs = (chunk.tabs >> bit_start) & mask; @@ -1483,9 +1554,10 @@ impl<'a> Iterator for FoldChunks<'a> { } #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] -pub struct FoldOffset(pub usize); +pub struct FoldOffset(pub MultiBufferOffset); impl FoldOffset { + #[ztracing::instrument(skip_all)] pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { let (start, _, item) = snapshot .transforms @@ -1493,7 +1565,7 @@ impl FoldOffset { let overshoot = if item.is_none_or(|t| t.is_fold()) { Point::new(0, (self.0 - start.0.0) as u32) } else { - let inlay_offset = start.1.input.len + self.0 - start.0.0; + let inlay_offset = start.1.input.len + (self - start.0); let inlay_point = snapshot.inlay_snapshot.to_point(InlayOffset(inlay_offset)); inlay_point.0 - start.1.input.lines }; @@ -1501,11 +1573,12 @@ impl FoldOffset { } #[cfg(test)] + #[ztracing::instrument(skip_all)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { let (start, _, _) = snapshot .transforms .find::, _>((), &self, Bias::Right); - let overshoot = self.0 - start.0.0; + let overshoot = self - start.0; InlayOffset(start.1.0 + overshoot) } } @@ -1518,17 +1591,46 @@ impl Add for FoldOffset { } } +impl Sub for FoldOffset { + type Output = ::Output; + + fn sub(self, rhs: Self) -> Self::Output { + self.0 - rhs.0 + } +} + +impl SubAssign for FoldOffset +where + MultiBufferOffset: SubAssign, +{ + fn sub_assign(&mut self, rhs: T) { + self.0 -= rhs; + } +} + +impl Add for FoldOffset +where + MultiBufferOffset: Add, +{ + type Output = Self; + + fn add(self, rhs: T) -> Self::Output { + Self(self.0 + rhs) + } +} + impl AddAssign for FoldOffset { fn add_assign(&mut self, rhs: Self) { self.0 += rhs.0; } } -impl Sub for FoldOffset { - type Output = Self; - - fn sub(self, rhs: Self) -> Self::Output { - Self(self.0 - rhs.0) +impl AddAssign for FoldOffset +where + 
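When `FoldChunks` splits a chunk, it re-slices the chunk's `tabs` bitmap by shifting and masking, reaching for `unbounded_shl` so a full 128-bit width cannot overflow the shift. A sketch of the same idea using `checked_shl`, the longer-standing stable spelling:

```rust
/// Keep only bits `start..end` of a chunk bitmap, re-based so bit `start`
/// becomes bit 0. Building the mask with `checked_shl` avoids overflow when
/// the width is the full 128 bits.
fn slice_bitmap(bits: u128, start: u32, end: u32) -> u128 {
    let width = end - start;
    let mask = 1u128
        .checked_shl(width)
        .map_or(u128::MAX, |m| m.wrapping_sub(1));
    (bits >> start) & mask
}

fn main() {
    // Bits set at positions 1, 3, and 6.
    let bits: u128 = 0b0100_1010;
    // Taking positions 2..7 keeps the bits at 3 and 6, now at positions 1 and 4.
    assert_eq!(slice_bitmap(bits, 2, 7), 0b1_0010);
    // A full-width slice is the identity and must not overflow the shift.
    assert_eq!(slice_bitmap(bits, 0, 128), bits);
}
```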
MultiBufferOffset: AddAssign, +{ + fn add_assign(&mut self, rhs: T) { + self.0 += rhs; } } @@ -1538,7 +1640,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { } fn add_summary(&mut self, summary: &'a TransformSummary, _: ()) { - self.0 += &summary.output.len; + self.0 += summary.output.len; } } @@ -1558,7 +1660,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { } fn add_summary(&mut self, summary: &'a TransformSummary, _: ()) { - self.0 += &summary.input.len; + self.0 += summary.input.len; } } @@ -1596,12 +1698,12 @@ mod tests { edits, &[ FoldEdit { - old: FoldOffset(2)..FoldOffset(16), - new: FoldOffset(2)..FoldOffset(5), + old: FoldOffset(MultiBufferOffset(2))..FoldOffset(MultiBufferOffset(16)), + new: FoldOffset(MultiBufferOffset(2))..FoldOffset(MultiBufferOffset(5)), }, FoldEdit { - old: FoldOffset(18)..FoldOffset(29), - new: FoldOffset(7)..FoldOffset(10) + old: FoldOffset(MultiBufferOffset(18))..FoldOffset(MultiBufferOffset(29)), + new: FoldOffset(MultiBufferOffset(7))..FoldOffset(MultiBufferOffset(10)), }, ] ); @@ -1626,12 +1728,12 @@ mod tests { edits, &[ FoldEdit { - old: FoldOffset(0)..FoldOffset(1), - new: FoldOffset(0)..FoldOffset(3), + old: FoldOffset(MultiBufferOffset(0))..FoldOffset(MultiBufferOffset(1)), + new: FoldOffset(MultiBufferOffset(0))..FoldOffset(MultiBufferOffset(3)), }, FoldEdit { - old: FoldOffset(6)..FoldOffset(6), - new: FoldOffset(8)..FoldOffset(11), + old: FoldOffset(MultiBufferOffset(6))..FoldOffset(MultiBufferOffset(6)), + new: FoldOffset(MultiBufferOffset(8))..FoldOffset(MultiBufferOffset(11)), }, ] ); @@ -1668,15 +1770,24 @@ mod tests { let mut map = FoldMap::new(inlay_snapshot.clone()).0; let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); - writer.fold(vec![(5..8, FoldPlaceholder::test())]); + writer.fold(vec![( + MultiBufferOffset(5)..MultiBufferOffset(8), + FoldPlaceholder::test(), + )]); let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); assert_eq!(snapshot.text(), "abcde⋯ijkl"); // Create an fold adjacent to the start of the first fold. let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); writer.fold(vec![ - (0..1, FoldPlaceholder::test()), - (2..5, FoldPlaceholder::test()), + ( + MultiBufferOffset(0)..MultiBufferOffset(1), + FoldPlaceholder::test(), + ), + ( + MultiBufferOffset(2)..MultiBufferOffset(5), + FoldPlaceholder::test(), + ), ]); let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); assert_eq!(snapshot.text(), "⋯b⋯ijkl"); @@ -1684,8 +1795,14 @@ mod tests { // Create an fold adjacent to the end of the first fold. let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); writer.fold(vec![ - (11..11, FoldPlaceholder::test()), - (8..10, FoldPlaceholder::test()), + ( + MultiBufferOffset(11)..MultiBufferOffset(11), + FoldPlaceholder::test(), + ), + ( + MultiBufferOffset(8)..MultiBufferOffset(10), + FoldPlaceholder::test(), + ), ]); let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]); assert_eq!(snapshot.text(), "⋯b⋯kl"); @@ -1697,15 +1814,25 @@ mod tests { // Create two adjacent folds. let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]); writer.fold(vec![ - (0..2, FoldPlaceholder::test()), - (2..5, FoldPlaceholder::test()), + ( + MultiBufferOffset(0)..MultiBufferOffset(2), + FoldPlaceholder::test(), + ), + ( + MultiBufferOffset(2)..MultiBufferOffset(5), + FoldPlaceholder::test(), + ), ]); let (snapshot, _) = map.read(inlay_snapshot, vec![]); assert_eq!(snapshot.text(), "⋯fghijkl"); // Edit within one of the folds. 
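The new arithmetic impls let `FoldOffset` and `InlayOffset` forward any `+ T` / `+= T` that the inner `MultiBufferOffset` supports, so call sites can add a plain `usize` without unwrapping the newtypes. A compiling sketch of that delegation pattern with illustrative names:

```rust
use std::ops::{Add, AddAssign};

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
struct BufferOffset(usize);

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
struct FoldOffset(BufferOffset);

// The inner newtype supports `+ usize` directly.
impl Add<usize> for BufferOffset {
    type Output = BufferOffset;
    fn add(self, rhs: usize) -> BufferOffset {
        BufferOffset(self.0 + rhs)
    }
}

impl AddAssign<usize> for BufferOffset {
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}

// The outer newtype forwards any `+ T` / `+= T` the inner type accepts,
// mirroring the bounded impls added for the real `FoldOffset`.
impl<T> Add<T> for FoldOffset
where
    BufferOffset: Add<T, Output = BufferOffset>,
{
    type Output = FoldOffset;
    fn add(self, rhs: T) -> FoldOffset {
        FoldOffset(self.0 + rhs)
    }
}

impl<T> AddAssign<T> for FoldOffset
where
    BufferOffset: AddAssign<T>,
{
    fn add_assign(&mut self, rhs: T) {
        self.0 += rhs;
    }
}

fn main() {
    let mut offset = FoldOffset(BufferOffset(3));
    offset += 2usize;
    assert_eq!(offset + 5usize, FoldOffset(BufferOffset(10)));
}
```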
let buffer_snapshot = buffer.update(cx, |buffer, cx| { - buffer.edit([(0..1, "12345")], None, cx); + buffer.edit( + [(MultiBufferOffset(0)..MultiBufferOffset(1), "12345")], + None, + cx, + ); buffer.snapshot(cx) }); let (inlay_snapshot, inlay_edits) = @@ -1849,7 +1976,7 @@ mod tests { for fold_range in map.merged_folds().into_iter().rev() { let fold_inlay_start = inlay_snapshot.to_inlay_offset(fold_range.start); let fold_inlay_end = inlay_snapshot.to_inlay_offset(fold_range.end); - expected_text.replace_range(fold_inlay_start.0..fold_inlay_end.0, "⋯"); + expected_text.replace_range(fold_inlay_start.0.0..fold_inlay_end.0.0, "⋯"); } assert_eq!(snapshot.text(), expected_text); @@ -1898,7 +2025,7 @@ mod tests { .chars() .count(); let mut fold_point = FoldPoint::new(0, 0); - let mut fold_offset = FoldOffset(0); + let mut fold_offset = FoldOffset(MultiBufferOffset(0)); let mut char_column = 0; for c in expected_text.chars() { let inlay_point = fold_point.to_inlay_point(&snapshot); @@ -1944,18 +2071,18 @@ mod tests { for _ in 0..5 { let mut start = snapshot.clip_offset( - FoldOffset(rng.random_range(0..=snapshot.len().0)), + FoldOffset(rng.random_range(MultiBufferOffset(0)..=snapshot.len().0)), Bias::Left, ); let mut end = snapshot.clip_offset( - FoldOffset(rng.random_range(0..=snapshot.len().0)), + FoldOffset(rng.random_range(MultiBufferOffset(0)..=snapshot.len().0)), Bias::Right, ); if start > end { mem::swap(&mut start, &mut end); } - let text = &expected_text[start.0..end.0]; + let text = &expected_text[start.0.0..end.0.0]; assert_eq!( snapshot .chunks(start..end, false, Highlights::default()) @@ -2004,9 +2131,12 @@ mod tests { } for _ in 0..5 { - let end = - buffer_snapshot.clip_offset(rng.random_range(0..=buffer_snapshot.len()), Right); - let start = buffer_snapshot.clip_offset(rng.random_range(0..=end), Left); + let end = buffer_snapshot.clip_offset( + rng.random_range(MultiBufferOffset(0)..=buffer_snapshot.len()), + Right, + ); + let start = + buffer_snapshot.clip_offset(rng.random_range(MultiBufferOffset(0)..=end), Left); let expected_folds = map .snapshot .folds @@ -2046,7 +2176,7 @@ mod tests { let bytes = start.to_offset(&snapshot)..end.to_offset(&snapshot); assert_eq!( snapshot.text_summary_for_range(lines), - TextSummary::from(&text[bytes.start.0..bytes.end.0]) + MBTextSummary::from(&text[bytes.start.0.0..bytes.end.0.0]) ) } @@ -2054,8 +2184,8 @@ mod tests { for (snapshot, edits) in snapshot_edits.drain(..) 
{ let new_text = snapshot.text(); for edit in edits { - let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0; - let new_bytes = edit.new.start.0..edit.new.end.0; + let old_bytes = edit.new.start.0.0..edit.new.start.0.0 + edit.old_len(); + let new_bytes = edit.new.start.0.0..edit.new.end.0.0; text.replace_range(old_bytes, &new_text[new_bytes]); } @@ -2126,7 +2256,7 @@ mod tests { // Get all chunks and verify their bitmaps let chunks = snapshot.chunks( - FoldOffset(0)..FoldOffset(snapshot.len().0), + FoldOffset(MultiBufferOffset(0))..FoldOffset(snapshot.len().0), false, Highlights::default(), ); @@ -2195,7 +2325,7 @@ mod tests { } impl FoldMap { - fn merged_folds(&self) -> Vec> { + fn merged_folds(&self) -> Vec> { let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); let buffer = &inlay_snapshot.buffer; let mut folds = self.snapshot.folds.items(buffer); @@ -2236,8 +2366,12 @@ mod tests { let buffer = &inlay_snapshot.buffer; let mut to_unfold = Vec::new(); for _ in 0..rng.random_range(1..=3) { - let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); - let start = buffer.clip_offset(rng.random_range(0..=end), Left); + let end = buffer.clip_offset( + rng.random_range(MultiBufferOffset(0)..=buffer.len()), + Right, + ); + let start = + buffer.clip_offset(rng.random_range(MultiBufferOffset(0)..=end), Left); to_unfold.push(start..end); } let inclusive = rng.random(); @@ -2252,8 +2386,12 @@ mod tests { let buffer = &inlay_snapshot.buffer; let mut to_fold = Vec::new(); for _ in 0..rng.random_range(1..=2) { - let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); - let start = buffer.clip_offset(rng.random_range(0..=end), Left); + let end = buffer.clip_offset( + rng.random_range(MultiBufferOffset(0)..=buffer.len()), + Right, + ); + let start = + buffer.clip_offset(rng.random_range(MultiBufferOffset(0)..=end), Left); to_fold.push((start..end, FoldPlaceholder::test())); } log::info!("folding {:?}", to_fold); diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 697cf1f68ceac4f6a777a3ec401658394f646af5..cbdc4b18fee452163c5a11932c968cb7cc500f96 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -1,10 +1,20 @@ +//! The inlay map. See the [`display_map`][super] docs for an overview of how the inlay map fits +//! into the rest of the [`DisplayMap`][super::DisplayMap]. Much of the documentation for this +//! module generalizes to other layers. +//! +//! The core of this module is the [`InlayMap`] struct, which maintains a vec of [`Inlay`]s, and +//! [`InlaySnapshot`], which holds a sum tree of [`Transform`]s. 
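The fold-map test above replays `snapshot_edits` into a shadow `String`, splicing `new.start..new.start + old_len` with the corresponding slice of the new text. A minimal sketch of that replay, assuming edits are sorted, non-overlapping, and expressed in the new text's coordinates:

```rust
use std::ops::Range;

/// A single replacement in the spirit of `text::Edit`: `old` is the replaced
/// byte range in the old text, `new` is where the replacement lives in the new text.
#[derive(Clone, Debug)]
struct Edit {
    old: Range<usize>,
    new: Range<usize>,
}

/// Bring `shadow` (a copy of the old text) up to date with `new_text` by
/// splicing each edit's new bytes over the bytes it replaced.
fn apply_edits(shadow: &mut String, new_text: &str, edits: &[Edit]) {
    for edit in edits {
        let old_len = edit.old.end - edit.old.start;
        let old_bytes = edit.new.start..edit.new.start + old_len;
        shadow.replace_range(old_bytes, &new_text[edit.new.clone()]);
    }
}

fn main() {
    let mut shadow = String::from("hello world");
    let new_text = "hello brave new world";
    // One edit: insert "brave new " at byte 6 (the old range there is empty).
    let edits = [Edit { old: 6..6, new: 6..16 }];
    apply_edits(&mut shadow, new_text, &edits);
    assert_eq!(shadow, new_text);
}
```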
+ use crate::{ ChunkRenderer, HighlightStyles, inlays::{Inlay, InlayContent}, }; use collections::BTreeSet; use language::{Chunk, Edit, Point, TextSummary}; -use multi_buffer::{MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, RowInfo, ToOffset}; +use multi_buffer::{ + MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, + RowInfo, ToOffset, +}; use project::InlayId; use std::{ cmp, @@ -42,13 +52,14 @@ impl std::ops::Deref for InlaySnapshot { #[derive(Clone, Debug)] enum Transform { - Isomorphic(TextSummary), + Isomorphic(MBTextSummary), Inlay(Inlay), } impl sum_tree::Item for Transform { type Summary = TransformSummary; + #[ztracing::instrument(skip_all)] fn summary(&self, _: ()) -> Self::Summary { match self { Transform::Isomorphic(summary) => TransformSummary { @@ -56,8 +67,8 @@ impl sum_tree::Item for Transform { output: *summary, }, Transform::Inlay(inlay) => TransformSummary { - input: TextSummary::default(), - output: inlay.text().summary(), + input: MBTextSummary::default(), + output: MBTextSummary::from(inlay.text().summary()), }, } } @@ -65,8 +76,10 @@ impl sum_tree::Item for Transform { #[derive(Clone, Debug, Default)] struct TransformSummary { - input: TextSummary, - output: TextSummary, + /// Summary of the text before inlays have been applied. + input: MBTextSummary, + /// Summary of the text after inlays have been applied. + output: MBTextSummary, } impl sum_tree::ContextLessSummary for TransformSummary { @@ -75,15 +88,15 @@ impl sum_tree::ContextLessSummary for TransformSummary { } fn add_summary(&mut self, other: &Self) { - self.input += &other.input; - self.output += &other.output; + self.input += other.input; + self.output += other.output; } } pub type InlayEdit = Edit; #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] -pub struct InlayOffset(pub usize); +pub struct InlayOffset(pub MultiBufferOffset); impl Add for InlayOffset { type Output = Self; @@ -94,10 +107,30 @@ impl Add for InlayOffset { } impl Sub for InlayOffset { - type Output = Self; + type Output = ::Output; fn sub(self, rhs: Self) -> Self::Output { - Self(self.0 - rhs.0) + self.0 - rhs.0 + } +} + +impl SubAssign for InlayOffset +where + MultiBufferOffset: SubAssign, +{ + fn sub_assign(&mut self, rhs: T) { + self.0 -= rhs; + } +} + +impl Add for InlayOffset +where + MultiBufferOffset: Add, +{ + type Output = Self; + + fn add(self, rhs: T) -> Self::Output { + Self(self.0 + rhs) } } @@ -107,9 +140,12 @@ impl AddAssign for InlayOffset { } } -impl SubAssign for InlayOffset { - fn sub_assign(&mut self, rhs: Self) { - self.0 -= rhs.0; +impl AddAssign for InlayOffset +where + MultiBufferOffset: AddAssign, +{ + fn add_assign(&mut self, rhs: T) { + self.0 += rhs; } } @@ -119,7 +155,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { } fn add_summary(&mut self, summary: &'a TransformSummary, _: ()) { - self.0 += &summary.output.len; + self.0 += summary.output.len; } } @@ -152,13 +188,13 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { } } -impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { +impl<'a> sum_tree::Dimension<'a, TransformSummary> for MultiBufferOffset { fn zero(_cx: ()) -> Self { Default::default() } fn add_summary(&mut self, summary: &'a TransformSummary, _: ()) { - *self += &summary.input.len; + *self += summary.input.len; } } @@ -181,7 +217,7 @@ pub struct InlayBufferRows<'a> { } pub struct InlayChunks<'a> { - transforms: Cursor<'a, 'static, Transform, Dimensions>, + transforms: Cursor<'a, 
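The new module docs describe the inlay map as a sum tree of transforms whose summaries track both an input (buffer) and an output (buffer plus inlays) length; isomorphic runs contribute to both, inlays only to the output. A linear sketch of that mapping, standing in for the sum-tree walk behind `to_buffer_offset`:

```rust
/// A stand-in for the inlay map's transforms: either a run of buffer text that
/// passes through unchanged, or inlay text that exists only in the output.
enum Transform {
    Isomorphic { len: usize },
    Inlay { len: usize },
}

impl Transform {
    // The two halves of `TransformSummary`: contribution to the input (buffer)
    // and output (buffer + inlays) coordinate spaces.
    fn input_len(&self) -> usize {
        match self {
            Transform::Isomorphic { len } => *len,
            Transform::Inlay { .. } => 0,
        }
    }
    fn output_len(&self) -> usize {
        match self {
            Transform::Isomorphic { len } | Transform::Inlay { len } => *len,
        }
    }
}

/// Map an output offset back to a buffer offset by walking prefix sums
/// (linearly here, for clarity; the real map uses a sum-tree cursor).
fn to_buffer_offset(transforms: &[Transform], output: usize) -> usize {
    let (mut input_start, mut output_start) = (0, 0);
    for transform in transforms {
        let output_end = output_start + transform.output_len();
        if output < output_end {
            // Offsets inside an inlay resolve to the inlay's position in the buffer.
            let overshoot = (output - output_start).min(transform.input_len());
            return input_start + overshoot;
        }
        input_start += transform.input_len();
        output_start = output_end;
    }
    input_start
}

fn main() {
    // Buffer "abcdef" with a 3-byte inlay shown after "abc".
    let transforms = [
        Transform::Isomorphic { len: 3 },
        Transform::Inlay { len: 3 },
        Transform::Isomorphic { len: 3 },
    ];
    assert_eq!(to_buffer_offset(&transforms, 2), 2); // inside "abc"
    assert_eq!(to_buffer_offset(&transforms, 4), 3); // inside the inlay: its anchor
    assert_eq!(to_buffer_offset(&transforms, 6), 3); // "d", first byte after the inlay
}
```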
'static, Transform, Dimensions>, buffer_chunks: CustomHighlightsChunks<'a>, buffer_chunk: Option>, inlay_chunks: Option>, @@ -202,6 +238,7 @@ pub struct InlayChunk<'a> { } impl InlayChunks<'_> { + #[ztracing::instrument(skip_all)] pub fn seek(&mut self, new_range: Range) { self.transforms.seek(&new_range.start, Bias::Right); @@ -222,6 +259,7 @@ impl InlayChunks<'_> { impl<'a> Iterator for InlayChunks<'a> { type Item = InlayChunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.output_offset == self.max_output_offset { return None; @@ -248,10 +286,8 @@ impl<'a> Iterator for InlayChunks<'a> { // Determine split index handling edge cases let split_index = if desired_bytes >= chunk.text.len() { chunk.text.len() - } else if chunk.text.is_char_boundary(desired_bytes) { - desired_bytes } else { - find_next_utf8_boundary(chunk.text, desired_bytes) + chunk.text.ceil_char_boundary(desired_bytes) }; let (prefix, suffix) = chunk.text.split_at(split_index); @@ -334,12 +370,12 @@ impl<'a> Iterator for InlayChunks<'a> { let offset_in_inlay = self.output_offset - self.transforms.start().0; if let Some((style, highlight)) = inlay_style_and_highlight { let range = &highlight.range; - if offset_in_inlay.0 < range.start { - next_inlay_highlight_endpoint = range.start - offset_in_inlay.0; - } else if offset_in_inlay.0 >= range.end { + if offset_in_inlay < range.start { + next_inlay_highlight_endpoint = range.start - offset_in_inlay; + } else if offset_in_inlay >= range.end { next_inlay_highlight_endpoint = usize::MAX; } else { - next_inlay_highlight_endpoint = range.end - offset_in_inlay.0; + next_inlay_highlight_endpoint = range.end - offset_in_inlay; highlight_style = highlight_style .map(|highlight| highlight.highlight(*style)) .or_else(|| Some(*style)); @@ -352,7 +388,7 @@ impl<'a> Iterator for InlayChunks<'a> { let start = offset_in_inlay; let end = cmp::min(self.max_output_offset, self.transforms.end().0) - self.transforms.start().0; - let chunks = inlay.text().chunks_in_range(start.0..end.0); + let chunks = inlay.text().chunks_in_range(start..end); text::ChunkWithBitmaps(chunks) }); let ChunkBitmaps { @@ -373,10 +409,8 @@ impl<'a> Iterator for InlayChunks<'a> { .next() .map(|c| c.len_utf8()) .unwrap_or(1) - } else if inlay_chunk.is_char_boundary(next_inlay_highlight_endpoint) { - next_inlay_highlight_endpoint } else { - find_next_utf8_boundary(inlay_chunk, next_inlay_highlight_endpoint) + inlay_chunk.ceil_char_boundary(next_inlay_highlight_endpoint) }; let (chunk, remainder) = inlay_chunk.split_at(split_index); @@ -419,6 +453,7 @@ impl<'a> Iterator for InlayChunks<'a> { } impl InlayBufferRows<'_> { + #[ztracing::instrument(skip_all)] pub fn seek(&mut self, row: u32) { let inlay_point = InlayPoint::new(row, 0); self.transforms.seek(&inlay_point, Bias::Left); @@ -443,6 +478,7 @@ impl InlayBufferRows<'_> { impl Iterator for InlayBufferRows<'_> { type Item = RowInfo; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { let buffer_row = if self.inlay_row == 0 { self.buffer_rows.next().unwrap() @@ -472,6 +508,7 @@ impl InlayPoint { } impl InlayMap { + #[ztracing::instrument(skip_all)] pub fn new(buffer: MultiBufferSnapshot) -> (Self, InlaySnapshot) { let version = 0; let snapshot = InlaySnapshot { @@ -489,10 +526,11 @@ impl InlayMap { ) } + #[ztracing::instrument(skip_all)] pub fn sync( &mut self, buffer_snapshot: MultiBufferSnapshot, - mut buffer_edits: Vec>, + mut buffer_edits: Vec>, ) -> (InlaySnapshot, Vec) { let snapshot = &mut self.snapshot; @@ -523,7 +561,7 
@@ impl InlayMap { let mut new_transforms = SumTree::default(); let mut cursor = snapshot .transforms - .cursor::>(()); + .cursor::>(()); let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left), ()); @@ -535,11 +573,9 @@ impl InlayMap { } // Remove all the inlays and transforms contained by the edit. - let old_start = - cursor.start().1 + InlayOffset(buffer_edit.old.start - cursor.start().0); + let old_start = cursor.start().1 + (buffer_edit.old.start - cursor.start().0); cursor.seek(&buffer_edit.old.end, Bias::Right); - let old_end = - cursor.start().1 + InlayOffset(buffer_edit.old.end - cursor.start().0); + let old_end = cursor.start().1 + (buffer_edit.old.end - cursor.start().0); // Push the unchanged prefix. let prefix_start = new_transforms.summary().input.len; @@ -623,6 +659,7 @@ impl InlayMap { } } + #[ztracing::instrument(skip_all)] pub fn splice( &mut self, to_remove: &[InlayId], @@ -673,11 +710,13 @@ impl InlayMap { (snapshot, edits) } + #[ztracing::instrument(skip_all)] pub fn current_inlays(&self) -> impl Iterator { self.inlays.iter() } #[cfg(test)] + #[ztracing::instrument(skip_all)] pub(crate) fn randomly_mutate( &mut self, next_inlay_id: &mut usize, @@ -691,7 +730,10 @@ impl InlayMap { let snapshot = &mut self.snapshot; for i in 0..rng.random_range(1..=5) { if self.inlays.is_empty() || rng.random() { - let position = snapshot.buffer.random_byte_range(0, rng).start; + let position = snapshot + .buffer + .random_byte_range(MultiBufferOffset(0), rng) + .start; let bias = if rng.random() { Bias::Left } else { @@ -743,10 +785,13 @@ impl InlayMap { } impl InlaySnapshot { + #[ztracing::instrument(skip_all)] pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { - let (start, _, item) = self - .transforms - .find::, _>((), &offset, Bias::Right); + let (start, _, item) = self.transforms.find::, _>((), &offset, Bias::Right); let overshoot = offset.0 - start.0.0; match item { Some(Transform::Isomorphic(_)) => { @@ -764,14 +809,17 @@ impl InlaySnapshot { } } + #[ztracing::instrument(skip_all)] pub fn len(&self) -> InlayOffset { InlayOffset(self.transforms.summary().output.len) } + #[ztracing::instrument(skip_all)] pub fn max_point(&self) -> InlayPoint { InlayPoint(self.transforms.summary().output.lines) } + #[ztracing::instrument(skip_all, fields(point))] pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { let (start, _, item) = self .transforms @@ -792,6 +840,7 @@ impl InlaySnapshot { None => self.len(), } } + #[ztracing::instrument(skip_all)] pub fn to_buffer_point(&self, point: InlayPoint) -> Point { let (start, _, item) = self.transforms @@ -805,22 +854,26 @@ impl InlaySnapshot { None => self.buffer.max_point(), } } - pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { - let (start, _, item) = - self.transforms - .find::, _>((), &offset, Bias::Right); + #[ztracing::instrument(skip_all)] + pub fn to_buffer_offset(&self, offset: InlayOffset) -> MultiBufferOffset { + let (start, _, item) = self + .transforms + .find::, _>((), &offset, Bias::Right); match item { Some(Transform::Isomorphic(_)) => { let overshoot = offset - start.0; - start.1 + overshoot.0 + start.1 + overshoot } Some(Transform::Inlay(_)) => start.1, None => self.buffer.len(), } } - pub fn to_inlay_offset(&self, offset: usize) -> InlayOffset { - let mut cursor = self.transforms.cursor::>(()); + #[ztracing::instrument(skip_all)] + pub fn to_inlay_offset(&self, offset: 
MultiBufferOffset) -> InlayOffset { + let mut cursor = self + .transforms + .cursor::>(()); cursor.seek(&offset, Bias::Left); loop { match cursor.item() { @@ -852,40 +905,22 @@ impl InlaySnapshot { } } } + + #[ztracing::instrument(skip_all)] pub fn to_inlay_point(&self, point: Point) -> InlayPoint { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&point, Bias::Left); - loop { - match cursor.item() { - Some(Transform::Isomorphic(_)) => { - if point == cursor.end().0 { - while let Some(Transform::Inlay(inlay)) = cursor.next_item() { - if inlay.position.bias() == Bias::Right { - break; - } else { - cursor.next(); - } - } - return cursor.end().1; - } else { - let overshoot = point - cursor.start().0; - return InlayPoint(cursor.start().1.0 + overshoot); - } - } - Some(Transform::Inlay(inlay)) => { - if inlay.position.bias() == Bias::Left { - cursor.next(); - } else { - return cursor.start().1; - } - } - None => { - return self.max_point(); - } - } + self.inlay_point_cursor().map(point) + } + + #[ztracing::instrument(skip_all)] + pub fn inlay_point_cursor(&self) -> InlayPointCursor<'_> { + let cursor = self.transforms.cursor::>(()); + InlayPointCursor { + cursor, + transforms: &self.transforms, } } + #[ztracing::instrument(skip_all)] pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { let mut cursor = self.transforms.cursor::>(()); cursor.seek(&point, Bias::Left); @@ -977,14 +1012,18 @@ impl InlaySnapshot { } } - pub fn text_summary(&self) -> TextSummary { + #[ztracing::instrument(skip_all)] + pub fn text_summary(&self) -> MBTextSummary { self.transforms.summary().output } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary { - let mut summary = TextSummary::default(); + #[ztracing::instrument(skip_all)] + pub fn text_summary_for_range(&self, range: Range) -> MBTextSummary { + let mut summary = MBTextSummary::default(); - let mut cursor = self.transforms.cursor::>(()); + let mut cursor = self + .transforms + .cursor::>(()); cursor.seek(&range.start, Bias::Right); let overshoot = range.start.0 - cursor.start().0.0; @@ -1000,7 +1039,12 @@ impl InlaySnapshot { Some(Transform::Inlay(inlay)) => { let suffix_start = overshoot; let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0; - summary = inlay.text().cursor(suffix_start).summary(suffix_end); + summary = MBTextSummary::from( + inlay + .text() + .cursor(suffix_start) + .summary::(suffix_end), + ); cursor.next(); } None => {} @@ -1018,7 +1062,7 @@ impl InlaySnapshot { let prefix_end = prefix_start + overshoot; summary += self .buffer - .text_summary_for_range::(prefix_start..prefix_end); + .text_summary_for_range::(prefix_start..prefix_end); } Some(Transform::Inlay(inlay)) => { let prefix_end = overshoot; @@ -1031,6 +1075,7 @@ impl InlaySnapshot { summary } + #[ztracing::instrument(skip_all)] pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> { let mut cursor = self.transforms.cursor::>(()); let inlay_point = InlayPoint::new(row, 0); @@ -1058,6 +1103,7 @@ impl InlaySnapshot { } } + #[ztracing::instrument(skip_all)] pub fn line_len(&self, row: u32) -> u32 { let line_start = self.to_offset(InlayPoint::new(row, 0)).0; let line_end = if row >= self.max_point().row() { @@ -1068,13 +1114,16 @@ impl InlaySnapshot { (line_end - line_start) as u32 } + #[ztracing::instrument(skip_all)] pub(crate) fn chunks<'a>( &'a self, range: Range, language_aware: bool, highlights: Highlights<'a>, ) -> InlayChunks<'a> { - let mut cursor = self.transforms.cursor::>(()); + let mut cursor 
= self + .transforms + .cursor::>(()); cursor.seek(&range.start, Bias::Right); let buffer_range = self.to_buffer_offset(range.start)..self.to_buffer_offset(range.end); @@ -1100,12 +1149,14 @@ impl InlaySnapshot { } #[cfg(test)] + #[ztracing::instrument(skip_all)] pub fn text(&self) -> String { self.chunks(Default::default()..self.len(), false, Highlights::default()) .map(|chunk| chunk.chunk.text) .collect() } + #[ztracing::instrument(skip_all)] fn check_invariants(&self) { #[cfg(any(debug_assertions, feature = "test-support"))] { @@ -1126,8 +1177,54 @@ impl InlaySnapshot { } } -fn push_isomorphic(sum_tree: &mut SumTree, summary: TextSummary) { - if summary.len == 0 { +pub struct InlayPointCursor<'transforms> { + cursor: Cursor<'transforms, 'static, Transform, Dimensions>, + transforms: &'transforms SumTree, +} + +impl InlayPointCursor<'_> { + #[ztracing::instrument(skip_all)] + pub fn map(&mut self, point: Point) -> InlayPoint { + let cursor = &mut self.cursor; + if cursor.did_seek() { + cursor.seek_forward(&point, Bias::Left); + } else { + cursor.seek(&point, Bias::Left); + } + loop { + match cursor.item() { + Some(Transform::Isomorphic(_)) => { + if point == cursor.end().0 { + while let Some(Transform::Inlay(inlay)) = cursor.next_item() { + if inlay.position.bias() == Bias::Right { + break; + } else { + cursor.next(); + } + } + return cursor.end().1; + } else { + let overshoot = point - cursor.start().0; + return InlayPoint(cursor.start().1.0 + overshoot); + } + } + Some(Transform::Inlay(inlay)) => { + if inlay.position.bias() == Bias::Left { + cursor.next(); + } else { + return cursor.start().1; + } + } + None => { + return InlayPoint(self.transforms.summary().output.lines); + } + } + } + } +} + +fn push_isomorphic(sum_tree: &mut SumTree, summary: MBTextSummary) { + if summary.len == MultiBufferOffset(0) { return; } @@ -1146,31 +1243,6 @@ fn push_isomorphic(sum_tree: &mut SumTree, summary: TextSummary) { } } -/// Given a byte index that is NOT a UTF-8 boundary, find the next one. -/// Assumes: 0 < byte_index < text.len() and !text.is_char_boundary(byte_index) -#[inline(always)] -fn find_next_utf8_boundary(text: &str, byte_index: usize) -> usize { - let bytes = text.as_bytes(); - let mut idx = byte_index + 1; - - // Scan forward until we find a boundary - while idx < text.len() { - if is_utf8_char_boundary(bytes[idx]) { - return idx; - } - idx += 1; - } - - // Hit the end, return the full length - text.len() -} - -// Private helper function taken from Rust's core::num module (which is both Apache2 and MIT licensed) -const fn is_utf8_char_boundary(byte: u8) -> bool { - // This is bit magic equivalent to: b < 128 || b >= 192 - (byte as i8) >= -0x40 -} - #[cfg(test)] mod tests { use super::*; @@ -1308,7 +1380,10 @@ mod tests { &[], vec![Inlay::mock_hint( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_after(3), + buffer + .read(cx) + .snapshot(cx) + .anchor_after(MultiBufferOffset(3)), "|123|", )], ); @@ -1364,7 +1439,15 @@ mod tests { // Edits before or after the inlay should not affect it. 
buffer.update(cx, |buffer, cx| { - buffer.edit([(2..3, "x"), (3..3, "y"), (4..4, "z")], None, cx) + buffer.edit( + [ + (MultiBufferOffset(2)..MultiBufferOffset(3), "x"), + (MultiBufferOffset(3)..MultiBufferOffset(3), "y"), + (MultiBufferOffset(4)..MultiBufferOffset(4), "z"), + ], + None, + cx, + ) }); let (inlay_snapshot, _) = inlay_map.sync( buffer.read(cx).snapshot(cx), @@ -1373,7 +1456,13 @@ mod tests { assert_eq!(inlay_snapshot.text(), "abxy|123|dzefghi"); // An edit surrounding the inlay should invalidate it. - buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "D")], None, cx)); + buffer.update(cx, |buffer, cx| { + buffer.edit( + [(MultiBufferOffset(4)..MultiBufferOffset(5), "D")], + None, + cx, + ) + }); let (inlay_snapshot, _) = inlay_map.sync( buffer.read(cx).snapshot(cx), buffer_edits.consume().into_inner(), @@ -1385,12 +1474,18 @@ mod tests { vec![ Inlay::mock_hint( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_before(3), + buffer + .read(cx) + .snapshot(cx) + .anchor_before(MultiBufferOffset(3)), "|123|", ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_after(3), + buffer + .read(cx) + .snapshot(cx) + .anchor_after(MultiBufferOffset(3)), "|456|", ), ], @@ -1398,7 +1493,13 @@ mod tests { assert_eq!(inlay_snapshot.text(), "abx|123||456|yDzefghi"); // Edits ending where the inlay starts should not move it if it has a left bias. - buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "JKL")], None, cx)); + buffer.update(cx, |buffer, cx| { + buffer.edit( + [(MultiBufferOffset(3)..MultiBufferOffset(3), "JKL")], + None, + cx, + ) + }); let (inlay_snapshot, _) = inlay_map.sync( buffer.read(cx).snapshot(cx), buffer_edits.consume().into_inner(), @@ -1600,17 +1701,26 @@ mod tests { vec![ Inlay::mock_hint( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_before(0), + buffer + .read(cx) + .snapshot(cx) + .anchor_before(MultiBufferOffset(0)), "|123|\n", ), Inlay::mock_hint( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_before(4), + buffer + .read(cx) + .snapshot(cx) + .anchor_before(MultiBufferOffset(4)), "|456|", ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), - buffer.read(cx).snapshot(cx).anchor_before(7), + buffer + .read(cx) + .snapshot(cx) + .anchor_before(MultiBufferOffset(7)), "\n|567|\n", ), ], @@ -1687,7 +1797,7 @@ mod tests { .collect::>(); let mut expected_text = Rope::from(&buffer_snapshot.text()); for (offset, inlay) in inlays.iter().rev() { - expected_text.replace(*offset..*offset, &inlay.text().to_string()); + expected_text.replace(offset.0..offset.0, &inlay.text().to_string()); } assert_eq!(inlay_snapshot.text(), expected_text.to_string()); @@ -1710,7 +1820,7 @@ mod tests { let mut text_highlights = TextHighlights::default(); let text_highlight_count = rng.random_range(0_usize..10); let mut text_highlight_ranges = (0..text_highlight_count) - .map(|_| buffer_snapshot.random_byte_range(0, &mut rng)) + .map(|_| buffer_snapshot.random_byte_range(MultiBufferOffset(0), &mut rng)) .collect::>(); text_highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end))); log::info!("highlighting text ranges {text_highlight_ranges:?}"); @@ -1773,12 +1883,13 @@ mod tests { } for _ in 0..5 { - let mut end = rng.random_range(0..=inlay_snapshot.len().0); + let mut end = rng.random_range(0..=inlay_snapshot.len().0.0); end = expected_text.clip_offset(end, Bias::Right); let mut start = rng.random_range(0..=end); start = expected_text.clip_offset(start, Bias::Right); - let 
range = InlayOffset(start)..InlayOffset(end); + let range = + InlayOffset(MultiBufferOffset(start))..InlayOffset(MultiBufferOffset(end)); log::info!("calling inlay_snapshot.chunks({range:?})"); let actual_text = inlay_snapshot .chunks( @@ -1800,25 +1911,27 @@ mod tests { ); assert_eq!( - inlay_snapshot.text_summary_for_range(InlayOffset(start)..InlayOffset(end)), - expected_text.slice(start..end).summary() + inlay_snapshot.text_summary_for_range( + InlayOffset(MultiBufferOffset(start))..InlayOffset(MultiBufferOffset(end)) + ), + MBTextSummary::from(expected_text.slice(start..end).summary()) ); } for edit in inlay_edits { prev_inlay_text.replace_range( - edit.new.start.0..edit.new.start.0 + edit.old_len().0, - &inlay_snapshot.text()[edit.new.start.0..edit.new.end.0], + edit.new.start.0.0..edit.new.start.0.0 + edit.old_len(), + &inlay_snapshot.text()[edit.new.start.0.0..edit.new.end.0.0], ); } assert_eq!(prev_inlay_text, inlay_snapshot.text()); assert_eq!(expected_text.max_point(), inlay_snapshot.max_point().0); - assert_eq!(expected_text.len(), inlay_snapshot.len().0); + assert_eq!(expected_text.len(), inlay_snapshot.len().0.0); let mut buffer_point = Point::default(); let mut inlay_point = inlay_snapshot.to_inlay_point(buffer_point); - let mut buffer_chars = buffer_snapshot.chars_at(0); + let mut buffer_chars = buffer_snapshot.chars_at(MultiBufferOffset(0)); loop { // Ensure conversion from buffer coordinates to inlay coordinates // is consistent. @@ -1959,7 +2072,7 @@ mod tests { // Get all chunks and verify their bitmaps let chunks = snapshot.chunks( - InlayOffset(0)..InlayOffset(snapshot.len().0), + InlayOffset(MultiBufferOffset(0))..snapshot.len(), false, Highlights::default(), ); @@ -2093,7 +2206,7 @@ mod tests { // Collect chunks - this previously would panic let chunks: Vec<_> = inlay_snapshot .chunks( - InlayOffset(0)..InlayOffset(inlay_snapshot.len().0), + InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(), false, highlights, ) @@ -2207,7 +2320,7 @@ mod tests { let chunks: Vec<_> = inlay_snapshot .chunks( - InlayOffset(0)..InlayOffset(inlay_snapshot.len().0), + InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(), false, highlights, ) diff --git a/crates/editor/src/display_map/invisibles.rs b/crates/editor/src/display_map/invisibles.rs index 5622a659b7acf850d24f6a476b23b53d214d855d..90bd54ab2807bbef703ac29e4ac4eaf49bcf71fd 100644 --- a/crates/editor/src/display_map/invisibles.rs +++ b/crates/editor/src/display_map/invisibles.rs @@ -30,6 +30,7 @@ // ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1 // ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt // https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt +#[ztracing::instrument(skip_all)] pub fn is_invisible(c: char) -> bool { if c <= '\u{1f}' { c != '\t' && c != '\n' && c != '\r' diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index ab3bddf7278605e89b816831059de73873853b32..4e768a477159820ea380aa48a123d103c0c2f6a2 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -20,6 +20,7 @@ const MAX_TABS: NonZeroU32 = NonZeroU32::new(SPACES.len() as u32).unwrap(); pub struct TabMap(TabSnapshot); impl TabMap { + #[ztracing::instrument(skip_all)] pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) { let snapshot = TabSnapshot { fold_snapshot, @@ -36,6 +37,7 @@ impl 
TabMap { self.0.clone() } + #[ztracing::instrument(skip_all)] pub fn sync( &mut self, fold_snapshot: FoldSnapshot, @@ -137,10 +139,10 @@ impl TabMap { let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); TabEdit { - old: old_snapshot.to_tab_point(old_start) - ..old_snapshot.to_tab_point(old_end), - new: new_snapshot.to_tab_point(new_start) - ..new_snapshot.to_tab_point(new_end), + old: old_snapshot.fold_point_to_tab_point(old_start) + ..old_snapshot.fold_point_to_tab_point(old_end), + new: new_snapshot.fold_point_to_tab_point(new_start) + ..new_snapshot.fold_point_to_tab_point(new_end), } }) .collect() @@ -176,14 +178,16 @@ impl std::ops::Deref for TabSnapshot { } impl TabSnapshot { + #[ztracing::instrument(skip_all)] pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { &self.fold_snapshot.inlay_snapshot.buffer } + #[ztracing::instrument(skip_all)] pub fn line_len(&self, row: u32) -> u32 { let max_point = self.max_point(); if row < max_point.row() { - self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row))) + self.fold_point_to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row))) .0 .column } else { @@ -191,13 +195,15 @@ impl TabSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn text_summary(&self) -> TextSummary { self.text_summary_for_range(TabPoint::zero()..self.max_point()) } + #[ztracing::instrument(skip_all, fields(rows))] pub fn text_summary_for_range(&self, range: Range) -> TextSummary { - let input_start = self.to_fold_point(range.start, Bias::Left).0; - let input_end = self.to_fold_point(range.end, Bias::Right).0; + let input_start = self.tab_point_to_fold_point(range.start, Bias::Left).0; + let input_end = self.tab_point_to_fold_point(range.end, Bias::Right).0; let input_summary = self .fold_snapshot .text_summary_for_range(input_start..input_end); @@ -234,6 +240,7 @@ impl TabSnapshot { } } + #[ztracing::instrument(skip_all)] pub(crate) fn chunks<'a>( &'a self, range: Range, @@ -241,11 +248,11 @@ impl TabSnapshot { highlights: Highlights<'a>, ) -> TabChunks<'a> { let (input_start, expanded_char_column, to_next_stop) = - self.to_fold_point(range.start, Bias::Left); + self.tab_point_to_fold_point(range.start, Bias::Left); let input_column = input_start.column(); let input_start = input_start.to_offset(&self.fold_snapshot); let input_end = self - .to_fold_point(range.end, Bias::Right) + .tab_point_to_fold_point(range.end, Bias::Right) .0 .to_offset(&self.fold_snapshot); let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 { @@ -276,11 +283,13 @@ impl TabSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn rows(&self, row: u32) -> fold_map::FoldRows<'_> { self.fold_snapshot.row_infos(row) } #[cfg(test)] + #[ztracing::instrument(skip_all)] pub fn text(&self) -> String { self.chunks( TabPoint::zero()..self.max_point(), @@ -291,25 +300,34 @@ impl TabSnapshot { .collect() } + #[ztracing::instrument(skip_all)] pub fn max_point(&self) -> TabPoint { - self.to_tab_point(self.fold_snapshot.max_point()) + self.fold_point_to_tab_point(self.fold_snapshot.max_point()) } + #[ztracing::instrument(skip_all)] pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint { - self.to_tab_point( + self.fold_point_to_tab_point( self.fold_snapshot - .clip_point(self.to_fold_point(point, bias).0, bias), + .clip_point(self.tab_point_to_fold_point(point, bias).0, bias), ) } - pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint { + 
#[ztracing::instrument(skip_all)] + pub fn fold_point_to_tab_point(&self, input: FoldPoint) -> TabPoint { let chunks = self.fold_snapshot.chunks_at(FoldPoint::new(input.row(), 0)); let tab_cursor = TabStopCursor::new(chunks); let expanded = self.expand_tabs(tab_cursor, input.column()); TabPoint::new(input.row(), expanded) } - pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) { + #[ztracing::instrument(skip_all)] + pub fn tab_point_cursor(&self) -> TabPointCursor<'_> { + TabPointCursor { this: self } + } + + #[ztracing::instrument(skip_all)] + pub fn tab_point_to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) { let chunks = self .fold_snapshot .chunks_at(FoldPoint::new(output.row(), 0)); @@ -326,20 +344,23 @@ impl TabSnapshot { ) } - pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint { + #[ztracing::instrument(skip_all)] + pub fn point_to_tab_point(&self, point: Point, bias: Bias) -> TabPoint { let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point); let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias); - self.to_tab_point(fold_point) + self.fold_point_to_tab_point(fold_point) } - pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point { - let fold_point = self.to_fold_point(point, bias).0; + #[ztracing::instrument(skip_all)] + pub fn tab_point_to_point(&self, point: TabPoint, bias: Bias) -> Point { + let fold_point = self.tab_point_to_fold_point(point, bias).0; let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); self.fold_snapshot .inlay_snapshot .to_buffer_point(inlay_point) } + #[ztracing::instrument(skip_all)] fn expand_tabs<'a, I>(&self, mut cursor: TabStopCursor<'a, I>, column: u32) -> u32 where I: Iterator>, @@ -373,6 +394,7 @@ impl TabSnapshot { expanded_bytes + column.saturating_sub(collapsed_bytes) } + #[ztracing::instrument(skip_all)] fn collapse_tabs<'a, I>( &self, mut cursor: TabStopCursor<'a, I>, @@ -432,6 +454,18 @@ impl TabSnapshot { } } +// todo(lw): Implement TabPointCursor properly +pub struct TabPointCursor<'this> { + this: &'this TabSnapshot, +} + +impl TabPointCursor<'_> { + #[ztracing::instrument(skip_all)] + pub fn map(&mut self, point: FoldPoint) -> TabPoint { + self.this.fold_point_to_tab_point(point) + } +} + #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] pub struct TabPoint(pub Point); @@ -471,6 +505,7 @@ pub struct TextSummary { } impl<'a> From<&'a str> for TextSummary { + #[ztracing::instrument(skip_all)] fn from(text: &'a str) -> Self { let sum = text::TextSummary::from(text); @@ -485,6 +520,7 @@ impl<'a> From<&'a str> for TextSummary { } impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { + #[ztracing::instrument(skip_all)] fn add_assign(&mut self, other: &'a Self) { let joined_chars = self.last_line_chars + other.first_line_chars; if joined_chars > self.longest_row_chars { @@ -526,14 +562,16 @@ pub struct TabChunks<'a> { } impl TabChunks<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn seek(&mut self, range: Range) { - let (input_start, expanded_char_column, to_next_stop) = - self.snapshot.to_fold_point(range.start, Bias::Left); + let (input_start, expanded_char_column, to_next_stop) = self + .snapshot + .tab_point_to_fold_point(range.start, Bias::Left); let input_column = input_start.column(); let input_start = input_start.to_offset(&self.snapshot.fold_snapshot); let input_end = self .snapshot - .to_fold_point(range.end, Bias::Right) + .tab_point_to_fold_point(range.end, Bias::Right) .0 
.to_offset(&self.snapshot.fold_snapshot); let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 { @@ -560,6 +598,7 @@ impl TabChunks<'_> { impl<'a> Iterator for TabChunks<'a> { type Item = Chunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.chunk.text.is_empty() { if let Some(chunk) = self.fold_chunks.next() { @@ -648,6 +687,7 @@ mod tests { inlay_map::InlayMap, }, }; + use multi_buffer::MultiBufferOffset; use rand::{Rng, prelude::StdRng}; use util; @@ -803,23 +843,23 @@ mod tests { assert_eq!( tab_snapshot.expected_to_fold_point(range.start, Bias::Left), - tab_snapshot.to_fold_point(range.start, Bias::Left), + tab_snapshot.tab_point_to_fold_point(range.start, Bias::Left), "Failed with tab_point at column {ix}" ); assert_eq!( tab_snapshot.expected_to_fold_point(range.start, Bias::Right), - tab_snapshot.to_fold_point(range.start, Bias::Right), + tab_snapshot.tab_point_to_fold_point(range.start, Bias::Right), "Failed with tab_point at column {ix}" ); assert_eq!( tab_snapshot.expected_to_fold_point(range.end, Bias::Left), - tab_snapshot.to_fold_point(range.end, Bias::Left), + tab_snapshot.tab_point_to_fold_point(range.end, Bias::Left), "Failed with tab_point at column {ix}" ); assert_eq!( tab_snapshot.expected_to_fold_point(range.end, Bias::Right), - tab_snapshot.to_fold_point(range.end, Bias::Right), + tab_snapshot.tab_point_to_fold_point(range.end, Bias::Right), "Failed with tab_point at column {ix}" ); } @@ -839,7 +879,7 @@ mod tests { // This should panic with the expected vs actual mismatch let tab_point = TabPoint::new(0, 9); - let result = tab_snapshot.to_fold_point(tab_point, Bias::Left); + let result = tab_snapshot.tab_point_to_fold_point(tab_point, Bias::Left); let expected = tab_snapshot.expected_to_fold_point(tab_point, Bias::Left); assert_eq!(result, expected); @@ -883,26 +923,26 @@ mod tests { assert_eq!( tab_snapshot.expected_to_fold_point(range.start, Bias::Left), - tab_snapshot.to_fold_point(range.start, Bias::Left), + tab_snapshot.tab_point_to_fold_point(range.start, Bias::Left), "Failed with input: {}, with idx: {ix}", input ); assert_eq!( tab_snapshot.expected_to_fold_point(range.start, Bias::Right), - tab_snapshot.to_fold_point(range.start, Bias::Right), + tab_snapshot.tab_point_to_fold_point(range.start, Bias::Right), "Failed with input: {}, with idx: {ix}", input ); assert_eq!( tab_snapshot.expected_to_fold_point(range.end, Bias::Left), - tab_snapshot.to_fold_point(range.end, Bias::Left), + tab_snapshot.tab_point_to_fold_point(range.end, Bias::Left), "Failed with input: {}, with idx: {ix}", input ); assert_eq!( tab_snapshot.expected_to_fold_point(range.end, Bias::Right), - tab_snapshot.to_fold_point(range.end, Bias::Right), + tab_snapshot.tab_point_to_fold_point(range.end, Bias::Right), "Failed with input: {}, with idx: {ix}", input ); @@ -942,13 +982,13 @@ mod tests { let input_point = Point::new(0, ix as u32); let output_point = Point::new(0, output.find(c).unwrap() as u32); assert_eq!( - tab_snapshot.to_tab_point(FoldPoint(input_point)), + tab_snapshot.fold_point_to_tab_point(FoldPoint(input_point)), TabPoint(output_point), "to_tab_point({input_point:?})" ); assert_eq!( tab_snapshot - .to_fold_point(TabPoint(output_point), Bias::Left) + .tab_point_to_fold_point(TabPoint(output_point), Bias::Left) .0, FoldPoint(input_point), "to_fold_point({output_point:?})" @@ -1137,7 +1177,7 @@ mod tests { let column = rng.random_range(0..=max_column + 10); let fold_point = FoldPoint::new(row, column); - let actual = 
tab_snapshot.to_tab_point(fold_point); + let actual = tab_snapshot.fold_point_to_tab_point(fold_point); let expected = tab_snapshot.expected_to_tab_point(fold_point); assert_eq!( @@ -1156,7 +1196,7 @@ mod tests { let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let chunks = fold_snapshot.chunks( - FoldOffset(0)..fold_snapshot.len(), + FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(), false, Default::default(), ); @@ -1318,7 +1358,7 @@ mod tests { let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let chunks = fold_snapshot.chunks( - FoldOffset(0)..fold_snapshot.len(), + FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(), false, Default::default(), ); @@ -1435,6 +1475,7 @@ impl<'a, I> TabStopCursor<'a, I> where I: Iterator>, { + #[ztracing::instrument(skip_all)] fn new(chunks: impl IntoIterator, IntoIter = I>) -> Self { Self { chunks: chunks.into_iter(), @@ -1444,6 +1485,7 @@ where } } + #[ztracing::instrument(skip_all)] fn bytes_until_next_char(&self) -> Option { self.current_chunk.as_ref().and_then(|(chunk, idx)| { let mut idx = *idx; @@ -1465,6 +1507,7 @@ where }) } + #[ztracing::instrument(skip_all)] fn is_char_boundary(&self) -> bool { self.current_chunk .as_ref() @@ -1472,6 +1515,7 @@ where } /// distance: length to move forward while searching for the next tab stop + #[ztracing::instrument(skip_all)] fn seek(&mut self, distance: u32) -> Option { if distance == 0 { return None; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 6d9704b5f93c0ce48d413babdd59997b02f093e6..879ca11be1a84ffd44daa6e53677b06887172026 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -86,6 +86,7 @@ pub struct WrapRows<'a> { } impl WrapRows<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn seek(&mut self, start_row: WrapRow) { self.transforms .seek(&WrapPoint::new(start_row, 0), Bias::Left); @@ -101,6 +102,7 @@ impl WrapRows<'_> { } impl WrapMap { + #[ztracing::instrument(skip_all)] pub fn new( tab_snapshot: TabSnapshot, font: Font, @@ -131,6 +133,7 @@ impl WrapMap { self.background_task.is_some() } + #[ztracing::instrument(skip_all)] pub fn sync( &mut self, tab_snapshot: TabSnapshot, @@ -150,6 +153,7 @@ impl WrapMap { (self.snapshot.clone(), mem::take(&mut self.edits_since_sync)) } + #[ztracing::instrument(skip_all)] pub fn set_font_with_size( &mut self, font: Font, @@ -167,6 +171,7 @@ impl WrapMap { } } + #[ztracing::instrument(skip_all)] pub fn set_wrap_width(&mut self, wrap_width: Option, cx: &mut Context) -> bool { if wrap_width == self.wrap_width { return false; @@ -177,6 +182,7 @@ impl WrapMap { true } + #[ztracing::instrument(skip_all)] fn rewrap(&mut self, cx: &mut Context) { self.background_task.take(); self.interpolated_edits.clear(); @@ -248,6 +254,7 @@ impl WrapMap { } } + #[ztracing::instrument(skip_all)] fn flush_edits(&mut self, cx: &mut Context) { if !self.snapshot.interpolated { let mut to_remove_len = 0; @@ -330,6 +337,7 @@ impl WrapMap { } impl WrapSnapshot { + #[ztracing::instrument(skip_all)] fn new(tab_snapshot: TabSnapshot) -> Self { let mut transforms = SumTree::default(); let extent = tab_snapshot.text_summary(); @@ -343,10 +351,12 @@ impl WrapSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { self.tab_snapshot.buffer_snapshot() } + #[ztracing::instrument(skip_all)] fn interpolate(&mut 
self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> WrapPatch { let mut new_transforms; if tab_edits.is_empty() { @@ -411,6 +421,7 @@ impl WrapSnapshot { old_snapshot.compute_edits(tab_edits, self) } + #[ztracing::instrument(skip_all)] async fn update( &mut self, new_tab_snapshot: TabSnapshot, @@ -570,6 +581,7 @@ impl WrapSnapshot { old_snapshot.compute_edits(tab_edits, self) } + #[ztracing::instrument(skip_all)] fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> WrapPatch { let mut wrap_edits = Vec::with_capacity(tab_edits.len()); let mut old_cursor = self.transforms.cursor::(()); @@ -606,6 +618,7 @@ impl WrapSnapshot { Patch::new(wrap_edits) } + #[ztracing::instrument(skip_all)] pub(crate) fn chunks<'a>( &'a self, rows: Range, @@ -622,9 +635,10 @@ impl WrapSnapshot { if transforms.item().is_some_and(|t| t.is_isomorphic()) { input_start.0 += output_start.0 - transforms.start().0.0; } - let input_end = self - .to_tab_point(output_end) - .min(self.tab_snapshot.max_point()); + let input_end = self.to_tab_point(output_end); + let max_point = self.tab_snapshot.max_point(); + let input_start = input_start.min(max_point); + let input_end = input_end.min(max_point); WrapChunks { input_chunks: self.tab_snapshot.chunks( input_start..input_end, @@ -639,10 +653,12 @@ impl WrapSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn max_point(&self) -> WrapPoint { WrapPoint(self.transforms.summary().output.lines) } + #[ztracing::instrument(skip_all)] pub fn line_len(&self, row: WrapRow) -> u32 { let (start, _, item) = self.transforms.find::, _>( (), @@ -663,6 +679,7 @@ impl WrapSnapshot { } } + #[ztracing::instrument(skip_all, fields(rows))] pub fn text_summary_for_range(&self, rows: Range) -> TextSummary { let mut summary = TextSummary::default(); @@ -724,6 +741,7 @@ impl WrapSnapshot { summary } + #[ztracing::instrument(skip_all)] pub fn soft_wrap_indent(&self, row: WrapRow) -> Option { let (.., item) = self.transforms.find::( (), @@ -739,10 +757,12 @@ impl WrapSnapshot { }) } + #[ztracing::instrument(skip_all)] pub fn longest_row(&self) -> u32 { self.transforms.summary().output.longest_row } + #[ztracing::instrument(skip_all)] pub fn row_infos(&self, start_row: WrapRow) -> WrapRows<'_> { let mut transforms = self .transforms @@ -765,6 +785,7 @@ impl WrapSnapshot { } } + #[ztracing::instrument(skip_all)] pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { let (start, _, item) = self.transforms @@ -776,14 +797,18 @@ impl WrapSnapshot { TabPoint(tab_point) } + #[ztracing::instrument(skip_all)] pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point { - self.tab_snapshot.to_point(self.to_tab_point(point), bias) + self.tab_snapshot + .tab_point_to_point(self.to_tab_point(point), bias) } + #[ztracing::instrument(skip_all)] pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint { - self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias)) + self.tab_point_to_wrap_point(self.tab_snapshot.point_to_tab_point(point, bias)) } + #[ztracing::instrument(skip_all)] pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { let (start, ..) 
= self.transforms @@ -791,6 +816,16 @@ impl WrapSnapshot { WrapPoint(start.1.0 + (point.0 - start.0.0)) } + #[ztracing::instrument(skip_all)] + pub fn wrap_point_cursor(&self) -> WrapPointCursor<'_> { + WrapPointCursor { + cursor: self + .transforms + .cursor::>(()), + } + } + + #[ztracing::instrument(skip_all)] pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { let (start, _, item) = self @@ -805,32 +840,65 @@ impl WrapSnapshot { self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias)) } - pub fn prev_row_boundary(&self, mut point: WrapPoint) -> WrapRow { + /// Try to find a TabRow start that is also a WrapRow start + /// Every TabRow start is a WrapRow start + #[ztracing::instrument(skip_all, fields(point=?point))] + pub fn prev_row_boundary(&self, point: WrapPoint) -> WrapRow { if self.transforms.is_empty() { return WrapRow(0); } - *point.column_mut() = 0; + let point = WrapPoint::new(point.row(), 0); let mut cursor = self .transforms .cursor::>(()); + cursor.seek(&point, Bias::Right); if cursor.item().is_none() { cursor.prev(); } + // real newline fake fake + // text: helloworldasldlfjasd\njdlasfalsk\naskdjfasdkfj\n + // dimensions v v v v v + // transforms |-------|-----NW----|-----W------|-----W------| + // cursor ^ ^^^^^^^^^^^^^ ^ + // (^) ^^^^^^^^^^^^^^ + // point: ^ + // point(col_zero): (^) + while let Some(transform) = cursor.item() { - if transform.is_isomorphic() && cursor.start().1.column() == 0 { - return cmp::min(cursor.end().0.row(), point.row()); - } else { - cursor.prev(); + if transform.is_isomorphic() { + // this transform only has real linefeeds + let tab_summary = &transform.summary.input; + // is the wrap just before the end of the transform a tab row? + // thats only if this transform has at least one newline + // + // "this wrap row is a tab row" <=> self.to_tab_point(WrapPoint::new(wrap_row, 0)).column() == 0 + + // Note on comparison: + // We have code that relies on this to be row > 1 + // It should work with row >= 1 but it does not :( + // + // That means that if every line is wrapped we walk back all the + // way to the start. Which invalidates the entire state triggering + // a full re-render. 
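+                //
+                // Illustrative case (see the disabled assertions in `test_prev_row_boundary`):
+                // with "1234\n5678" soft-wrapped after every character, no isomorphic
+                // transform contains more than one newline, so the `> 1` branch below never
+                // applies and the cursor walks much farther back than the nearest tab-row
+                // boundary.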
+ if tab_summary.lines.row > 1 { + let wrap_point_at_end = cursor.end().0.row(); + return cmp::min(wrap_point_at_end - RowDelta(1), point.row()); + } else if cursor.start().1.column() == 0 { + return cmp::min(cursor.end().0.row(), point.row()); + } } + + cursor.prev(); } - unreachable!() + WrapRow(0) } + #[ztracing::instrument(skip_all)] pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { point.0 += Point::new(1, 0); @@ -864,6 +932,7 @@ impl WrapSnapshot { .map(|h| h.text) } + #[ztracing::instrument(skip_all)] fn check_invariants(&self) { #[cfg(test)] { @@ -912,7 +981,25 @@ impl WrapSnapshot { } } +pub struct WrapPointCursor<'transforms> { + cursor: Cursor<'transforms, 'static, Transform, Dimensions>, +} + +impl WrapPointCursor<'_> { + #[ztracing::instrument(skip_all)] + pub fn map(&mut self, point: TabPoint) -> WrapPoint { + let cursor = &mut self.cursor; + if cursor.did_seek() { + cursor.seek_forward(&point, Bias::Right); + } else { + cursor.seek(&point, Bias::Right); + } + WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0)) + } +} + impl WrapChunks<'_> { + #[ztracing::instrument(skip_all)] pub(crate) fn seek(&mut self, rows: Range) { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); @@ -921,10 +1008,10 @@ impl WrapChunks<'_> { if self.transforms.item().is_some_and(|t| t.is_isomorphic()) { input_start.0 += output_start.0 - self.transforms.start().0.0; } - let input_end = self - .snapshot - .to_tab_point(output_end) - .min(self.snapshot.tab_snapshot.max_point()); + let input_end = self.snapshot.to_tab_point(output_end); + let max_point = self.snapshot.tab_snapshot.max_point(); + let input_start = input_start.min(max_point); + let input_end = input_end.min(max_point); self.input_chunks.seek(input_start..input_end); self.input_chunk = Chunk::default(); self.output_position = output_start; @@ -935,6 +1022,7 @@ impl WrapChunks<'_> { impl<'a> Iterator for WrapChunks<'a> { type Item = Chunk<'a>; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.output_position.row() >= self.max_output_row { return None; @@ -1007,6 +1095,7 @@ impl<'a> Iterator for WrapChunks<'a> { impl Iterator for WrapRows<'_> { type Item = RowInfo; + #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { if self.output_row > self.max_output_row { return None; @@ -1030,6 +1119,7 @@ impl Iterator for WrapRows<'_> { RowInfo { buffer_id: None, buffer_row: None, + base_text_row: None, multibuffer_row: None, diff_status, expand_info: None, @@ -1042,6 +1132,7 @@ impl Iterator for WrapRows<'_> { } impl Transform { + #[ztracing::instrument(skip_all)] fn isomorphic(summary: TextSummary) -> Self { #[cfg(test)] assert!(!summary.lines.is_zero()); @@ -1055,6 +1146,7 @@ impl Transform { } } + #[ztracing::instrument(skip_all)] fn wrap(indent: u32) -> Self { static WRAP_TEXT: LazyLock = LazyLock::new(|| { let mut wrap_text = String::new(); @@ -1107,6 +1199,7 @@ trait SumTreeExt { } impl SumTreeExt for SumTree { + #[ztracing::instrument(skip_all)] fn push_or_extend(&mut self, transform: Transform) { let mut transform = Some(transform); self.update_last( @@ -1170,6 +1263,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint { } impl sum_tree::SeekTarget<'_, TransformSummary, TransformSummary> for TabPoint { + #[ztracing::instrument(skip_all)] fn cmp(&self, cursor_location: &TransformSummary, _: ()) -> std::cmp::Ordering { Ord::cmp(&self.0, &cursor_location.input.lines) } @@ -1229,6 +1323,71 @@ mod tests { use text::Rope; use 
theme::LoadThemes; + #[gpui::test] + async fn test_prev_row_boundary(cx: &mut gpui::TestAppContext) { + init_test(cx); + + fn test_wrap_snapshot( + text: &str, + soft_wrap_every: usize, // font size multiple + cx: &mut gpui::TestAppContext, + ) -> WrapSnapshot { + let text_system = cx.read(|cx| cx.text_system().clone()); + let tab_size = 4.try_into().unwrap(); + let font = test_font(); + let _font_id = text_system.resolve_font(&font); + let font_size = px(14.0); + // this is very much an estimate to try and get the wrapping to + // occur at `soft_wrap_every` we check that it pans out for every test case + let soft_wrapping = Some(font_size * soft_wrap_every * 0.6); + + let buffer = cx.new(|cx| language::Buffer::local(text, cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + let (_inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot); + let (_fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); + let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size); + let tabs_snapshot = tab_map.set_max_expansion_column(32); + let (_wrap_map, wrap_snapshot) = + cx.update(|cx| WrapMap::new(tabs_snapshot, font, font_size, soft_wrapping, cx)); + + wrap_snapshot + } + + // These two should pass but dont, see the comparison note in + // prev_row_boundary about why. + // + // // 0123 4567 wrap_rows + // let wrap_snapshot = test_wrap_snapshot("1234\n5678", 1, cx); + // assert_eq!(wrap_snapshot.text(), "1\n2\n3\n4\n5\n6\n7\n8"); + // let row = wrap_snapshot.prev_row_boundary(wrap_snapshot.max_point()); + // assert_eq!(row.0, 3); + + // // 012 345 678 wrap_rows + // let wrap_snapshot = test_wrap_snapshot("123\n456\n789", 1, cx); + // assert_eq!(wrap_snapshot.text(), "1\n2\n3\n4\n5\n6\n7\n8\n9"); + // let row = wrap_snapshot.prev_row_boundary(wrap_snapshot.max_point()); + // assert_eq!(row.0, 5); + + // 012345678 wrap_rows + let wrap_snapshot = test_wrap_snapshot("123456789", 1, cx); + assert_eq!(wrap_snapshot.text(), "1\n2\n3\n4\n5\n6\n7\n8\n9"); + let row = wrap_snapshot.prev_row_boundary(wrap_snapshot.max_point()); + assert_eq!(row.0, 0); + + // 111 2222 44 wrap_rows + let wrap_snapshot = test_wrap_snapshot("123\n4567\n\n89", 4, cx); + assert_eq!(wrap_snapshot.text(), "123\n4567\n\n89"); + let row = wrap_snapshot.prev_row_boundary(wrap_snapshot.max_point()); + assert_eq!(row.0, 2); + + // 11 2223 wrap_rows + let wrap_snapshot = test_wrap_snapshot("12\n3456\n\n", 3, cx); + assert_eq!(wrap_snapshot.text(), "12\n345\n6\n\n"); + let row = wrap_snapshot.prev_row_boundary(wrap_snapshot.max_point()); + assert_eq!(row.0, 3); + } + #[gpui::test(iterations = 100)] async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) { // todo this test is flaky diff --git a/crates/editor/src/edit_prediction_tests.rs b/crates/editor/src/edit_prediction_tests.rs index 74f13a404c6a52db448d68eba9e5c255e7276923..b5931cde42a4e2c0e21b2d1f68558879de9750b4 100644 --- a/crates/editor/src/edit_prediction_tests.rs +++ b/crates/editor/src/edit_prediction_tests.rs @@ -1,4 +1,4 @@ -use edit_prediction::EditPredictionProvider; +use edit_prediction_types::EditPredictionDelegate; use gpui::{Entity, KeyBinding, Modifiers, prelude::*}; use indoc::indoc; use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; @@ -15,7 +15,7 @@ async fn test_edit_prediction_insert(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| 
FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); assign_editor_completion_provider(provider.clone(), &mut cx); cx.set_state("let absolute_zero_celsius = ˇ;"); @@ -37,7 +37,7 @@ async fn test_edit_prediction_modification(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); assign_editor_completion_provider(provider.clone(), &mut cx); cx.set_state("let pi = ˇ\"foo\";"); @@ -59,7 +59,7 @@ async fn test_edit_prediction_jump_button(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); assign_editor_completion_provider(provider.clone(), &mut cx); // Cursor is 2+ lines above the proposed edit @@ -128,7 +128,7 @@ async fn test_edit_prediction_invalidation_range(cx: &mut gpui::TestAppContext) init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); assign_editor_completion_provider(provider.clone(), &mut cx); // Cursor is 3+ lines above the proposed edit @@ -233,7 +233,7 @@ async fn test_edit_prediction_jump_disabled_for_non_zed_providers(cx: &mut gpui: init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeNonZedEditPredictionProvider::default()); + let provider = cx.new(|_| FakeNonZedEditPredictionDelegate::default()); assign_editor_completion_provider_non_zed(provider.clone(), &mut cx); // Cursor is 2+ lines above the proposed edit @@ -281,7 +281,7 @@ async fn test_edit_prediction_preview_cleanup_on_toggle_off(cx: &mut gpui::TestA cx.update(|cx| cx.bind_keys([KeyBinding::new("ctrl-shift-a", AcceptEditPrediction, None)])); let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); assign_editor_completion_provider(provider.clone(), &mut cx); cx.set_state("let x = ˇ;"); @@ -371,7 +371,7 @@ fn accept_completion(cx: &mut EditorTestContext) { } fn propose_edits( - provider: &Entity, + provider: &Entity, edits: Vec<(Range, &str)>, cx: &mut EditorTestContext, ) { @@ -383,7 +383,7 @@ fn propose_edits( cx.update(|_, cx| { provider.update(cx, |provider, _| { - provider.set_edit_prediction(Some(edit_prediction::EditPrediction::Local { + provider.set_edit_prediction(Some(edit_prediction_types::EditPrediction::Local { id: None, edits: edits.collect(), edit_preview: None, @@ -393,7 +393,7 @@ fn propose_edits( } fn assign_editor_completion_provider( - provider: Entity, + provider: Entity, cx: &mut EditorTestContext, ) { cx.update_editor(|editor, window, cx| { @@ -402,7 +402,7 @@ fn assign_editor_completion_provider( } fn propose_edits_non_zed( - provider: &Entity, + provider: &Entity, edits: Vec<(Range, &str)>, cx: &mut EditorTestContext, ) { @@ -414,7 +414,7 @@ fn propose_edits_non_zed( cx.update(|_, cx| { provider.update(cx, |provider, _| { - provider.set_edit_prediction(Some(edit_prediction::EditPrediction::Local { + provider.set_edit_prediction(Some(edit_prediction_types::EditPrediction::Local { id: None, edits: edits.collect(), edit_preview: None, @@ -424,7 +424,7 @@ fn 
propose_edits_non_zed( } fn assign_editor_completion_provider_non_zed( - provider: Entity, + provider: Entity, cx: &mut EditorTestContext, ) { cx.update_editor(|editor, window, cx| { @@ -433,17 +433,20 @@ fn assign_editor_completion_provider_non_zed( } #[derive(Default, Clone)] -pub struct FakeEditPredictionProvider { - pub completion: Option, +pub struct FakeEditPredictionDelegate { + pub completion: Option, } -impl FakeEditPredictionProvider { - pub fn set_edit_prediction(&mut self, completion: Option) { +impl FakeEditPredictionDelegate { + pub fn set_edit_prediction( + &mut self, + completion: Option, + ) { self.completion = completion; } } -impl EditPredictionProvider for FakeEditPredictionProvider { +impl EditPredictionDelegate for FakeEditPredictionDelegate { fn name() -> &'static str { "fake-completion-provider" } @@ -452,7 +455,7 @@ impl EditPredictionProvider for FakeEditPredictionProvider { "Fake Completion Provider" } - fn show_completions_in_menu() -> bool { + fn show_predictions_in_menu() -> bool { true } @@ -469,7 +472,7 @@ impl EditPredictionProvider for FakeEditPredictionProvider { true } - fn is_refreshing(&self) -> bool { + fn is_refreshing(&self, _cx: &gpui::App) -> bool { false } @@ -482,15 +485,6 @@ impl EditPredictionProvider for FakeEditPredictionProvider { ) { } - fn cycle( - &mut self, - _buffer: gpui::Entity, - _cursor_position: language::Anchor, - _direction: edit_prediction::Direction, - _cx: &mut gpui::Context, - ) { - } - fn accept(&mut self, _cx: &mut gpui::Context) {} fn discard(&mut self, _cx: &mut gpui::Context) {} @@ -500,23 +494,26 @@ impl EditPredictionProvider for FakeEditPredictionProvider { _buffer: &gpui::Entity, _cursor_position: language::Anchor, _cx: &mut gpui::Context, - ) -> Option { + ) -> Option { self.completion.clone() } } #[derive(Default, Clone)] -pub struct FakeNonZedEditPredictionProvider { - pub completion: Option, +pub struct FakeNonZedEditPredictionDelegate { + pub completion: Option, } -impl FakeNonZedEditPredictionProvider { - pub fn set_edit_prediction(&mut self, completion: Option) { +impl FakeNonZedEditPredictionDelegate { + pub fn set_edit_prediction( + &mut self, + completion: Option, + ) { self.completion = completion; } } -impl EditPredictionProvider for FakeNonZedEditPredictionProvider { +impl EditPredictionDelegate for FakeNonZedEditPredictionDelegate { fn name() -> &'static str { "fake-non-zed-provider" } @@ -525,7 +522,7 @@ impl EditPredictionProvider for FakeNonZedEditPredictionProvider { "Fake Non-Zed Provider" } - fn show_completions_in_menu() -> bool { + fn show_predictions_in_menu() -> bool { false } @@ -542,7 +539,7 @@ impl EditPredictionProvider for FakeNonZedEditPredictionProvider { true } - fn is_refreshing(&self) -> bool { + fn is_refreshing(&self, _cx: &gpui::App) -> bool { false } @@ -555,15 +552,6 @@ impl EditPredictionProvider for FakeNonZedEditPredictionProvider { ) { } - fn cycle( - &mut self, - _buffer: gpui::Entity, - _cursor_position: language::Anchor, - _direction: edit_prediction::Direction, - _cx: &mut gpui::Context, - ) { - } - fn accept(&mut self, _cx: &mut gpui::Context) {} fn discard(&mut self, _cx: &mut gpui::Context) {} @@ -573,7 +561,7 @@ impl EditPredictionProvider for FakeNonZedEditPredictionProvider { _buffer: &gpui::Entity, _cursor_position: language::Anchor, _cx: &mut gpui::Context, - ) -> Option { + ) -> Option { self.completion.clone() } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 
223dbb776550e949d0ce86dca6f68aff6482433d..8560705802264dad55b87dbf21e1f9aa7625edf8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12,7 +12,8 @@ //! //! If you're looking to improve Vim mode, you should check out Vim crate that wraps Editor and overrides its behavior. pub mod actions; -mod blink_manager; +pub mod blink_manager; +mod bracket_colorization; mod clangd_ext; pub mod code_context_menus; pub mod display_map; @@ -35,6 +36,7 @@ mod persistence; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; +mod split; pub mod tasks; #[cfg(test)] @@ -49,7 +51,7 @@ pub mod test; pub(crate) use actions::*; pub use display_map::{ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder}; -pub use edit_prediction::Direction; +pub use edit_prediction_types::Direction; pub use editor_settings::{ CurrentLineHighlight, DocumentColorsRenderMode, EditorSettings, HideMouseMode, ScrollBeyondLastLine, ScrollbarAxes, SearchSettings, ShowMinimap, @@ -64,18 +66,16 @@ pub use items::MAX_TAB_TITLE_LEN; pub use lsp::CompletionContext; pub use lsp_ext::lsp_tasks; pub use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, PathKey, - RowInfo, ToOffset, ToPoint, + Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer, + MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset, + ToPoint, }; +pub use split::SplittableEditor; pub use text::Bias; -use ::git::{ - Restore, - blame::{BlameEntry, ParsedCommitMessage}, - status::FileStatus, -}; +use ::git::{Restore, blame::BlameEntry, commit::ParsedCommitMessage, status::FileStatus}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder, BuildError}; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result, anyhow, bail}; use blink_manager::BlinkManager; use buffer_diff::DiffHunkStatus; use client::{Collaborator, ParticipantIndex, parse_zed_link}; @@ -88,7 +88,9 @@ use collections::{BTreeMap, HashMap, HashSet, VecDeque}; use convert_case::{Case, Casing}; use dap::TelemetrySpawnLocation; use display_map::*; -use edit_prediction::{EditPredictionProvider, EditPredictionProviderHandle}; +use edit_prediction_types::{ + EditPredictionDelegate, EditPredictionDelegateHandle, EditPredictionGranularity, +}; use editor_settings::{GoToDefinitionFallback, Minimap as MinimapSettings}; use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap, layout_line}; use futures::{ @@ -103,10 +105,11 @@ use gpui::{ AvailableSpace, Background, Bounds, ClickEvent, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Edges, Entity, EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, Global, HighlightStyle, Hsla, KeyContext, Modifiers, - MouseButton, MouseDownEvent, PaintQuad, ParentElement, Pixels, Render, ScrollHandle, - SharedString, Size, Stateful, Styled, Subscription, Task, TextStyle, TextStyleRefinement, - UTF16Selection, UnderlineStyle, UniformListScrollHandle, WeakEntity, WeakFocusHandle, Window, - div, point, prelude::*, pulsating_between, px, relative, size, + MouseButton, MouseDownEvent, MouseMoveEvent, PaintQuad, ParentElement, Pixels, PressureStage, + Render, ScrollHandle, SharedString, Size, Stateful, Styled, Subscription, Task, TextRun, + TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle, UniformListScrollHandle, + WeakEntity, WeakFocusHandle, Window, div, point, prelude::*, pulsating_between, px, relative, + size, }; use 
hover_links::{HoverLink, HoveredLinkState, find_file}; use hover_popover::{HoverState, hide_hover}; @@ -117,11 +120,12 @@ use language::{ AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow, BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape, DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind, - IndentSize, Language, OffsetRangeExt, OutlineItem, Point, Runnable, RunnableRange, Selection, - SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery, + IndentSize, Language, LanguageName, LanguageRegistry, LanguageScope, OffsetRangeExt, + OutlineItem, Point, Runnable, Selection, SelectionGoal, TextObject, TransactionId, + TreeSitterOptions, WordsQuery, language_settings::{ - self, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings, - language_settings, + self, LanguageSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, + all_language_settings, language_settings, }, point_from_lsp, point_to_lsp, text_diff_with_options, }; @@ -142,8 +146,8 @@ use persistence::DB; use project::{ BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId, - InvalidationStrategy, Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, - ProjectPath, ProjectTransaction, TaskSourceKind, + InvalidationStrategy, Location, LocationLink, LspAction, PrepareRenameResponse, Project, + ProjectItem, ProjectPath, ProjectTransaction, TaskSourceKind, debugger::{ breakpoint_store::{ Breakpoint, BreakpointEditAction, BreakpointSessionState, BreakpointState, @@ -174,10 +178,11 @@ use std::{ borrow::Cow, cell::{OnceCell, RefCell}, cmp::{self, Ordering, Reverse}, + collections::hash_map, iter::{self, Peekable}, mem, num::NonZeroU32, - ops::{Deref, DerefMut, Not, Range, RangeInclusive}, + ops::{ControlFlow, Deref, DerefMut, Not, Range, RangeInclusive}, path::{Path, PathBuf}, rc::Rc, sync::Arc, @@ -186,7 +191,7 @@ use std::{ use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables}; use text::{BufferId, FromAnchor, OffsetUtf16, Rope, ToOffset as _}; use theme::{ - ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings, + AccentColors, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, ThemeSettings, observe_buffer_font_size_adjustment, }; use ui::{ @@ -279,6 +284,9 @@ pub enum ConflictsTheirs {} pub enum ConflictsOursMarker {} pub enum ConflictsTheirsMarker {} +pub struct HunkAddedColor; +pub struct HunkRemovedColor; + #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Navigated { Yes, @@ -302,6 +310,7 @@ enum DisplayDiffHunk { display_row_range: Range, multi_buffer_range: Range, status: DiffHunkStatus, + word_diffs: Vec>, }, } @@ -342,8 +351,8 @@ pub fn init(cx: &mut App) { ) .detach(); } - }); - cx.on_action(move |_: &workspace::NewWindow, cx| { + }) + .on_action(move |_: &workspace::NewWindow, cx| { let app_state = workspace::AppState::global(cx); if let Some(app_state) = app_state.upgrade() { workspace::open_new( @@ -371,6 +380,7 @@ pub trait DiagnosticRenderer { buffer_id: BufferId, snapshot: EditorSnapshot, editor: WeakEntity, + language_registry: Option>, cx: &mut App, ) -> Vec>; @@ -379,6 +389,7 @@ pub trait DiagnosticRenderer { diagnostic_group: Vec>, range: Range, buffer_id: BufferId, + language_registry: Option>, cx: &mut App, ) -> Option>; @@ -564,7 +575,7 @@ impl Default for 
EditorStyle { } } -pub fn make_inlay_hints_style(cx: &mut App) -> HighlightStyle { +pub fn make_inlay_hints_style(cx: &App) -> HighlightStyle { let show_background = language_settings::language_settings(None, None, cx) .inlay_hints .show_background; @@ -587,7 +598,7 @@ pub fn make_inlay_hints_style(cx: &mut App) -> HighlightStyle { style } -pub fn make_suggestion_styles(cx: &mut App) -> EditPredictionStyles { +pub fn make_suggestion_styles(cx: &App) -> EditPredictionStyles { EditPredictionStyles { insertion: HighlightStyle { color: Some(cx.theme().status().predictive), @@ -715,7 +726,10 @@ impl EditorActionId { // type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor; // type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option; -type BackgroundHighlight = (fn(&Theme) -> Hsla, Arc<[Range]>); +type BackgroundHighlight = ( + Arc Hsla + Send + Sync>, + Arc<[Range]>, +); type GutterHighlight = (fn(&App) -> Hsla, Vec>); #[derive(Default)] @@ -853,9 +867,6 @@ pub struct ResolvedTasks { position: Anchor, } -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] -struct BufferOffset(usize); - /// Addons allow storing per-editor state in other crates (e.g. Vim) pub trait Addon: 'static { fn extend_key_context(&self, _: &mut KeyContext, _: &App) {} @@ -1068,6 +1079,7 @@ pub struct Editor { show_breakpoints: Option, show_wrap_guides: Option, show_indent_guides: Option, + buffers_with_disabled_indent_guides: HashSet, highlight_order: usize, highlighted_rows: HashMap>, background_highlights: HashMap, @@ -1095,7 +1107,11 @@ pub struct Editor { pending_rename: Option, searchable: bool, cursor_shape: CursorShape, + /// Whether the cursor is offset one character to the left when something is + /// selected (needed for vim visual mode) + cursor_offset_on_selection: bool, current_line_highlight: Option, + pub collapse_matches: bool, autoindent_mode: Option, workspace: Option<(WeakEntity, Option)>, input_enabled: bool, @@ -1105,9 +1121,10 @@ pub struct Editor { remote_id: Option, pub hover_state: HoverState, pending_mouse_down: Option>>>, + prev_pressure_stage: Option, gutter_hovered: bool, hovered_link_state: Option, - edit_prediction_provider: Option, + edit_prediction_provider: Option, code_action_providers: Vec>, active_edit_prediction: Option, /// Used to prevent flickering as the user types while the menu is open @@ -1115,6 +1132,7 @@ pub struct Editor { edit_prediction_settings: EditPredictionSettings, edit_predictions_hidden_for_vim_mode: bool, show_edit_predictions_override: Option, + show_completions_on_input_override: Option, menu_edit_predictions_policy: MenuEditPredictionsPolicy, edit_prediction_preview: EditPredictionPreview, edit_prediction_indent_conflict: bool, @@ -1163,6 +1181,7 @@ pub struct Editor { gutter_breakpoint_indicator: (Option, Option>), hovered_diff_hunk_row: Option, pull_diagnostics_task: Task<()>, + pull_diagnostics_background_task: Task<()>, in_project_search: bool, previous_search_ranges: Option]>>, breadcrumb_header: Option, @@ -1192,6 +1211,16 @@ pub struct Editor { folding_newlines: Task<()>, select_next_is_case_sensitive: Option, pub lookup_key: Option>, + applicable_language_settings: HashMap, LanguageSettings>, + accent_data: Option, + fetched_tree_sitter_chunks: HashMap>>, + use_base_text_line_numbers: bool, +} + +#[derive(Debug, PartialEq)] +struct AccentData { + colors: AccentColors, + overrides: Vec, } fn debounce_value(debounce_ms: u64) -> Option { @@ -1224,6 +1253,7 @@ impl NextScrollCursorCenterTopBottom { pub struct EditorSnapshot { pub mode: 
EditorMode, show_gutter: bool, + offset_content: bool, show_line_numbers: Option, show_git_diff_gutter: Option, show_code_actions: Option, @@ -1297,8 +1327,9 @@ struct SelectionHistoryEntry { add_selections_state: Option, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)] enum SelectionHistoryMode { + #[default] Normal, Undoing, Redoing, @@ -1311,12 +1342,6 @@ struct HoveredCursor { selection_id: usize, } -impl Default for SelectionHistoryMode { - fn default() -> Self { - Self::Normal - } -} - #[derive(Debug)] /// SelectionEffects controls the side-effects of updating the selection. /// @@ -1543,8 +1568,8 @@ pub struct RenameState { struct InvalidationStack(Vec); -struct RegisteredEditPredictionProvider { - provider: Arc, +struct RegisteredEditPredictionDelegate { + provider: Arc, _subscription: Subscription, } @@ -1572,11 +1597,50 @@ pub struct ClipboardSelection { pub is_entire_line: bool, /// The indentation of the first line when this content was originally copied. pub first_line_indent: u32, + #[serde(default)] + pub file_path: Option, + #[serde(default)] + pub line_range: Option>, +} + +impl ClipboardSelection { + pub fn for_buffer( + len: usize, + is_entire_line: bool, + range: Range, + buffer: &MultiBufferSnapshot, + project: Option<&Entity>, + cx: &App, + ) -> Self { + let first_line_indent = buffer + .indent_size_for_line(MultiBufferRow(range.start.row)) + .len; + + let file_path = util::maybe!({ + let project = project?.read(cx); + let file = buffer.file_at(range.start)?; + let project_path = ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path().clone(), + }; + project.absolute_path(&project_path, cx) + }); + + let line_range = file_path.as_ref().map(|_| range.start.row..=range.end.row); + + Self { + len, + is_entire_line, + first_line_indent, + file_path, + line_range, + } + } } // selections, scroll behavior, was newest selection reversed type SelectSyntaxNodeHistoryState = ( - Box<[Selection]>, + Box<[Selection]>, SelectSyntaxNodeScrollBehavior, bool, ); @@ -1636,7 +1700,7 @@ pub(crate) struct FocusedBlock { focus_handle: WeakFocusHandle, } -#[derive(Clone)] +#[derive(Clone, Debug)] enum JumpData { MultiBufferRow { row: MultiBufferRow, @@ -1766,7 +1830,11 @@ impl Editor { Editor::new_internal(mode, buffer, project, None, window, cx) } - pub fn sticky_headers(&self, cx: &App) -> Option>> { + pub fn sticky_headers( + &self, + style: &EditorStyle, + cx: &App, + ) -> Option>> { let multi_buffer = self.buffer().read(cx); let multi_buffer_snapshot = multi_buffer.snapshot(cx); let multi_buffer_visible_start = self @@ -1779,20 +1847,19 @@ impl Editor { let start_row = (multi_buffer_visible_start.row).min(max_row); let end_row = (multi_buffer_visible_start.row + 10).min(max_row); - if let Some((excerpt_id, buffer_id, buffer)) = multi_buffer.read(cx).as_singleton() { + if let Some((excerpt_id, _, buffer)) = multi_buffer.read(cx).as_singleton() { let outline_items = buffer .outline_items_containing( Point::new(start_row, 0)..Point::new(end_row, 0), true, - self.style().map(|style| style.syntax.as_ref()), + Some(style.syntax.as_ref()), ) .into_iter() .map(|outline_item| OutlineItem { depth: outline_item.depth, - range: Anchor::range_in_buffer(*excerpt_id, buffer_id, outline_item.range), + range: Anchor::range_in_buffer(*excerpt_id, outline_item.range), source_range_for_text: Anchor::range_in_buffer( *excerpt_id, - buffer_id, outline_item.source_range_for_text, ), text: outline_item.text, @@ -1800,10 +1867,10 @@ impl 
Editor { name_ranges: outline_item.name_ranges, body_range: outline_item .body_range - .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)), + .map(|range| Anchor::range_in_buffer(*excerpt_id, range)), annotation_range: outline_item .annotation_range - .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)), + .map(|range| Anchor::range_in_buffer(*excerpt_id, range)), }); return Some(outline_items.collect()); } @@ -1887,7 +1954,11 @@ impl Editor { let selections = SelectionsCollection::new(); let blink_manager = cx.new(|cx| { - let mut blink_manager = BlinkManager::new(CURSOR_BLINK_INTERVAL, cx); + let mut blink_manager = BlinkManager::new( + CURSOR_BLINK_INTERVAL, + |cx| EditorSettings::get_global(cx).cursor_blink, + cx, + ); if is_minimap { blink_manager.disable(cx); } @@ -1931,16 +2002,18 @@ impl Editor { } } project::Event::SnippetEdit(id, snippet_edits) => { - if let Some(buffer) = editor.buffer.read(cx).buffer(*id) { + // todo(lw): Non singletons + if let Some(buffer) = editor.buffer.read(cx).as_singleton() { + let snapshot = buffer.read(cx).snapshot(); let focus_handle = editor.focus_handle(cx); - if focus_handle.is_focused(window) { - let snapshot = buffer.read(cx).snapshot(); + if snapshot.remote_id() == *id && focus_handle.is_focused(window) { for (range, snippet) in snippet_edits { - let editor_range = + let buffer_range = language::range_from_lsp(*range).to_offset(&snapshot); editor .insert_snippet( - &[editor_range], + &[MultiBufferOffset(buffer_range.start) + ..MultiBufferOffset(buffer_range.end)], snippet.clone(), window, cx, @@ -1987,46 +2060,34 @@ impl Editor { }) }); }); - let edited_buffers_already_open = { - let other_editors: Vec> = workspace - .read(cx) - .panes() - .iter() - .flat_map(|pane| pane.read(cx).items_of_type::()) - .filter(|editor| editor.entity_id() != cx.entity_id()) - .collect(); - - transaction.0.keys().all(|buffer| { - other_editors.iter().any(|editor| { - let multi_buffer = editor.read(cx).buffer(); - multi_buffer.read(cx).is_singleton() - && multi_buffer.read(cx).as_singleton().map_or( - false, - |singleton| { - singleton.entity_id() == buffer.entity_id() - }, - ) - }) - }) - }; - if !edited_buffers_already_open { - let workspace = workspace.downgrade(); - let transaction = transaction.clone(); - cx.defer_in(window, move |_, window, cx| { - cx.spawn_in(window, async move |editor, cx| { - Self::open_project_transaction( - &editor, - workspace, - transaction, - "Rename".to_string(), - cx, - ) - .await - .ok() - }) - .detach(); - }); - } + + Self::open_transaction_for_hidden_buffers( + workspace, + transaction.clone(), + "Rename".to_string(), + window, + cx, + ); + } + } + + project::Event::WorkspaceEditApplied(transaction) => { + let Some(workspace) = editor.workspace() else { + return; + }; + let Some(active_editor) = workspace.read(cx).active_item_as::(cx) + else { + return; + }; + + if active_editor.entity_id() == cx.entity_id() { + Self::open_transaction_for_hidden_buffers( + workspace, + transaction.clone(), + "LSP Edit".to_string(), + window, + cx, + ); } } @@ -2181,6 +2242,7 @@ impl Editor { show_breakpoints: None, show_wrap_guides: None, show_indent_guides, + buffers_with_disabled_indent_guides: HashSet::default(), highlight_order: 0, highlighted_rows: HashMap::default(), background_highlights: HashMap::default(), @@ -2211,9 +2273,10 @@ impl Editor { cursor_shape: EditorSettings::get_global(cx) .cursor_shape .unwrap_or_default(), + cursor_offset_on_selection: false, current_line_highlight: None, autoindent_mode: 
Some(AutoindentMode::EachLine), - + collapse_matches: false, workspace: None, input_enabled: !is_minimap, use_modal_editing: full_mode, @@ -2226,6 +2289,7 @@ impl Editor { remote_id: None, hover_state: HoverState::default(), pending_mouse_down: None, + prev_pressure_stage: None, hovered_link_state: None, edit_prediction_provider: None, active_edit_prediction: None, @@ -2250,6 +2314,7 @@ impl Editor { editor_actions: Rc::default(), edit_predictions_hidden_for_vim_mode: false, show_edit_predictions_override: None, + show_completions_on_input_override: None, menu_edit_predictions_policy: MenuEditPredictionsPolicy::ByProvider, edit_prediction_settings: EditPredictionSettings::Disabled, edit_prediction_indent_conflict: false, @@ -2303,6 +2368,7 @@ impl Editor { .unwrap_or_default(), tasks_update_task: None, pull_diagnostics_task: Task::ready(()), + pull_diagnostics_background_task: Task::ready(()), colors: None, refresh_colors_task: Task::ready(()), inlay_hints: None, @@ -2335,12 +2401,19 @@ impl Editor { folding_newlines: Task::ready(()), lookup_key: None, select_next_is_case_sensitive: None, + applicable_language_settings: HashMap::default(), + accent_data: None, + fetched_tree_sitter_chunks: HashMap::default(), + use_base_text_line_numbers: false, }; if is_minimap { return editor; } + editor.applicable_language_settings = editor.fetch_applicable_language_settings(cx); + editor.accent_data = editor.fetch_accent_data(cx); + if let Some(breakpoints) = editor.breakpoint_store.as_ref() { editor ._subscriptions @@ -2380,13 +2453,17 @@ impl Editor { InlayHintRefreshReason::NewLinesShown, cx, ); + editor.colorize_brackets(false, cx); }) .ok(); }); } } EditorEvent::Edited { .. } => { - if vim_flavor(cx).is_none() { + let vim_mode = vim_mode_setting::VimModeSetting::try_get(cx) + .map(|vim_mode| vim_mode.0) + .unwrap_or(false); + if !vim_mode { let display_map = editor.display_snapshot(cx); let selections = editor.selections.all_adjusted_display(&display_map); let pop_state = editor @@ -2468,7 +2545,6 @@ impl Editor { if let Some(buffer) = multi_buffer.read(cx).as_singleton() { editor.register_buffer(buffer.read(cx).remote_id(), cx); } - editor.update_lsp_data(None, window, cx); editor.report_editor_event(ReportEditorEvent::EditorOpened, None, cx); } @@ -2514,7 +2590,7 @@ impl Editor { } self.selections - .disjoint_in_range::(range.clone(), &self.display_snapshot(cx)) + .disjoint_in_range::(range.clone(), &self.display_snapshot(cx)) .into_iter() .any(|selection| { // This is needed to cover a corner case, if we just check for an existing @@ -2633,6 +2709,10 @@ impl Editor { key_context.add("end_of_input"); } + if self.has_any_expanded_diff_hunks(cx) { + key_context.add("diffs_expanded"); + } + key_context } @@ -2685,21 +2765,24 @@ impl Editor { pub fn accept_edit_prediction_keybind( &self, - accept_partial: bool, + granularity: EditPredictionGranularity, window: &mut Window, cx: &mut App, ) -> AcceptEditPredictionBinding { let key_context = self.key_context_internal(true, window, cx); let in_conflict = self.edit_prediction_in_conflict(); - let bindings = if accept_partial { - window.bindings_for_action_in_context(&AcceptPartialEditPrediction, key_context) - } else { - window.bindings_for_action_in_context(&AcceptEditPrediction, key_context) - }; + let bindings = + match granularity { + EditPredictionGranularity::Word => window + .bindings_for_action_in_context(&AcceptNextWordEditPrediction, key_context), + EditPredictionGranularity::Line => window + 
.bindings_for_action_in_context(&AcceptNextLineEditPrediction, key_context), + EditPredictionGranularity::Full => { + window.bindings_for_action_in_context(&AcceptEditPrediction, key_context) + } + }; - // TODO: if the binding contains multiple keystrokes, display all of them, not - // just the first one. AcceptEditPredictionBinding(bindings.into_iter().rev().find(|binding| { !in_conflict || binding @@ -2854,6 +2937,7 @@ impl Editor { EditorSnapshot { mode: self.mode.clone(), show_gutter: self.show_gutter, + offset_content: self.offset_content, show_line_numbers: self.show_line_numbers, show_git_diff_gutter: self.show_git_diff_gutter, show_code_actions: self.show_code_actions, @@ -2948,9 +3032,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) where - T: EditPredictionProvider, + T: EditPredictionDelegate, { - self.edit_prediction_provider = provider.map(|provider| RegisteredEditPredictionProvider { + self.edit_prediction_provider = provider.map(|provider| RegisteredEditPredictionDelegate { _subscription: cx.observe_in(&provider, window, |this, _, window, cx| { if this.focus_handle.is_focused(window) { this.update_visible_edit_prediction(window, cx); @@ -3004,6 +3088,14 @@ impl Editor { cx.notify(); } + pub fn cursor_shape(&self) -> CursorShape { + self.cursor_shape + } + + pub fn set_cursor_offset_on_selection(&mut self, set_cursor_offset_on_selection: bool) { + self.cursor_offset_on_selection = set_cursor_offset_on_selection; + } + pub fn set_current_line_highlight( &mut self, current_line_highlight: Option, @@ -3011,17 +3103,21 @@ impl Editor { self.current_line_highlight = current_line_highlight; } - pub fn range_for_match( - &self, - range: &Range, - collapse: bool, - ) -> Range { - if collapse { + pub fn set_collapse_matches(&mut self, collapse_matches: bool) { + self.collapse_matches = collapse_matches; + } + + pub fn range_for_match(&self, range: &Range) -> Range { + if self.collapse_matches { return range.start..range.start; } range.clone() } + pub fn clip_at_line_ends(&mut self, cx: &mut Context) -> bool { + self.display_map.read(cx).clip_at_line_ends + } + pub fn set_clip_at_line_ends(&mut self, clip: bool, cx: &mut Context) { if self.display_map.read(cx).clip_at_line_ends != clip { self.display_map @@ -3109,6 +3205,10 @@ impl Editor { } } + pub fn set_show_completions_on_input(&mut self, show_completions_on_input: Option) { + self.show_completions_on_input_override = show_completions_on_input; + } + pub fn set_show_edit_predictions( &mut self, show_edit_predictions: Option, @@ -3167,7 +3267,9 @@ impl Editor { // Copy selections to primary selection buffer #[cfg(any(target_os = "linux", target_os = "freebsd"))] if local { - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); let buffer_handle = self.buffer.read(cx).read(cx); let mut text = String::new(); @@ -3228,7 +3330,7 @@ impl Editor { } if local { - if let Some(buffer_id) = new_cursor_position.buffer_id { + if let Some(buffer_id) = new_cursor_position.text_anchor.buffer_id { self.register_buffer(buffer_id, cx); } @@ -3310,7 +3412,8 @@ impl Editor { data.selections = inmemory_selections; }); - if WorkspaceSettings::get(None, cx).restore_on_startup != RestoreOnStartupBehavior::None + if WorkspaceSettings::get(None, cx).restore_on_startup + != RestoreOnStartupBehavior::EmptyTab && let Some(workspace_id) = self.workspace_serialization_id(cx) { let snapshot = self.buffer().read(cx).snapshot(cx); @@ -3323,8 +3426,8 @@ impl Editor 
{ .iter() .map(|selection| { ( - selection.start.to_offset(&snapshot), - selection.end.to_offset(&snapshot), + selection.start.to_offset(&snapshot).0, + selection.end.to_offset(&snapshot).0, ) }) .collect(); @@ -3350,7 +3453,8 @@ impl Editor { use text::ToPoint as _; if self.mode.is_minimap() - || WorkspaceSettings::get(None, cx).restore_on_startup == RestoreOnStartupBehavior::None + || WorkspaceSettings::get(None, cx).restore_on_startup + == RestoreOnStartupBehavior::EmptyTab { return; } @@ -3366,7 +3470,7 @@ impl Editor { return; }; let inmemory_folds = display_snapshot - .folds_in_range(0..display_snapshot.buffer_snapshot().len()) + .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { fold.range.start.text_anchor.to_point(&snapshot) ..fold.range.end.text_anchor.to_point(&snapshot) @@ -3382,7 +3486,7 @@ impl Editor { let background_executor = cx.background_executor().clone(); let editor_id = cx.entity().entity_id().as_u64() as ItemId; let db_folds = display_snapshot - .folds_in_range(0..display_snapshot.buffer_snapshot().len()) + .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { ( fold.range.start.text_anchor.to_offset(&snapshot), @@ -3639,7 +3743,10 @@ impl Editor { cx: &mut Context, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let tail = self.selections.newest::(&display_map).tail(); + let tail = self + .selections + .newest::(&display_map) + .tail(); let click_count = click_count.max(match self.selections.select_mode() { SelectMode::Character => 1, SelectMode::Word(_) => 2, @@ -3705,7 +3812,7 @@ impl Editor { ) { if !self.focus_handle.is_focused(window) { self.last_focused_descendant = None; - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -3748,7 +3855,7 @@ impl Editor { auto_scroll = true; } _ => { - start = buffer.anchor_before(0); + start = buffer.anchor_before(MultiBufferOffset(0)); end = buffer.anchor_before(buffer.len()); mode = SelectMode::All; auto_scroll = false; @@ -3810,7 +3917,7 @@ impl Editor { ) { if !self.focus_handle.is_focused(window) { self.last_focused_descendant = None; - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -3961,7 +4068,9 @@ impl Editor { fn end_selection(&mut self, window: &mut Window, cx: &mut Context) { self.columnar_selection_state.take(); if let Some(pending_mode) = self.selections.pending_mode() { - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select(selections); s.clear_pending(); @@ -4076,17 +4185,24 @@ impl Editor { self.selection_mark_mode = false; self.selection_drag_state = SelectionDragState::None; + if self.dismiss_menus_and_popups(true, window, cx) { + cx.notify(); + return; + } if self.clear_expanded_diff_hunks(cx) { cx.notify(); return; } - if self.dismiss_menus_and_popups(true, window, cx) { + if self.show_git_blame_gutter { + self.show_git_blame_gutter = false; + cx.notify(); return; } if self.mode.is_full() && self.change_selections(Default::default(), window, cx, |s| s.try_cancel()) { + cx.notify(); return; } @@ -4099,44 +4215,23 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> bool { - if self.take_rename(false, window, 
cx).is_some() { - return true; - } - - if self.hide_blame_popover(true, cx) { - return true; - } - - if hide_hover(self, cx) { - return true; - } - - if self.hide_signature_help(cx, SignatureHelpHiddenBy::Escape) { - return true; - } - - if self.hide_context_menu(window, cx).is_some() { - return true; - } - - if self.mouse_context_menu.take().is_some() { - return true; - } + let mut dismissed = false; - if is_user_requested && self.discard_edit_prediction(true, cx) { - return true; - } - - if self.snippet_stack.pop().is_some() { - return true; - } + dismissed |= self.take_rename(false, window, cx).is_some(); + dismissed |= self.hide_blame_popover(true, cx); + dismissed |= hide_hover(self, cx); + dismissed |= self.hide_signature_help(cx, SignatureHelpHiddenBy::Escape); + dismissed |= self.hide_context_menu(window, cx).is_some(); + dismissed |= self.mouse_context_menu.take().is_some(); + dismissed |= is_user_requested && self.discard_edit_prediction(true, cx); + dismissed |= self.snippet_stack.pop().is_some(); if self.mode.is_full() && matches!(self.active_diagnostics, ActiveDiagnostic::Group(_)) { self.dismiss_diagnostics(cx); - return true; + dismissed = true; } - false + dismissed } fn linked_editing_ranges_for( @@ -4176,8 +4271,8 @@ impl Editor { continue; } if self.selections.disjoint_anchor_ranges().any(|s| { - if s.start.buffer_id != selection.start.buffer_id - || s.end.buffer_id != selection.end.buffer_id + if s.start.text_anchor.buffer_id != selection.start.buffer_id + || s.end.text_anchor.buffer_id != selection.end.buffer_id { return false; } @@ -4296,10 +4391,50 @@ impl Editor { && bracket_pair.start.len() == 1 { let target = bracket_pair.start.chars().next().unwrap(); + let mut byte_offset = 0u32; let current_line_count = snapshot .reversed_chars_at(selection.start) .take_while(|&c| c != '\n') - .filter(|&c| c == target) + .filter(|c| { + byte_offset += c.len_utf8() as u32; + if *c != target { + return false; + } + + let point = Point::new( + selection.start.row, + selection.start.column.saturating_sub(byte_offset), + ); + + let is_enabled = snapshot + .language_scope_at(point) + .and_then(|scope| { + scope + .brackets() + .find(|(pair, _)| { + pair.start == bracket_pair.start + }) + .map(|(_, enabled)| enabled) + }) + .unwrap_or(true); + + let is_delimiter = snapshot + .language_scope_at(Point::new( + point.row, + point.column + 1, + )) + .and_then(|scope| { + scope + .brackets() + .find(|(pair, _)| { + pair.start == bracket_pair.start + }) + .map(|(_, enabled)| !enabled) + }) + .unwrap_or(false); + + is_enabled && !is_delimiter + }) .count(); current_line_count % 2 == 1 } else { @@ -4488,17 +4623,19 @@ impl Editor { let new_anchor_selections = new_selections.iter().map(|e| &e.0); let new_selection_deltas = new_selections.iter().map(|e| e.1); let map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); - let new_selections = - resolve_selections_wrapping_blocks::(new_anchor_selections, &map) - .zip(new_selection_deltas) - .map(|(selection, delta)| Selection { - id: selection.id, - start: selection.start + delta, - end: selection.end + delta, - reversed: selection.reversed, - goal: SelectionGoal::None, - }) - .collect::>(); + let new_selections = resolve_selections_wrapping_blocks::( + new_anchor_selections, + &map, + ) + .zip(new_selection_deltas) + .map(|(selection, delta)| Selection { + id: selection.id, + start: selection.start + delta, + end: selection.end + delta, + reversed: selection.reversed, + goal: SelectionGoal::None, + }) + .collect::>(); let mut i = 0; for 
(position, delta, selection_id, pair) in new_autoclose_regions { @@ -4634,7 +4771,9 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = { - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); let multi_buffer = this.buffer.read(cx); let buffer = multi_buffer.snapshot(cx); selections @@ -4648,205 +4787,51 @@ impl Editor { let end = selection.end; let selection_is_empty = start == end; let language_scope = buffer.language_scope_at(start); - let ( - comment_delimiter, - doc_delimiter, - insert_extra_newline, - indent_on_newline, - indent_on_extra_newline, - ) = if let Some(language) = &language_scope { - let mut insert_extra_newline = - insert_extra_newline_brackets(&buffer, start..end, language) - || insert_extra_newline_tree_sitter(&buffer, start..end); - - // Comment extension on newline is allowed only for cursor selections - let comment_delimiter = maybe!({ - if !selection_is_empty { - return None; - } - - if !multi_buffer.language_settings(cx).extend_comment_on_newline { - return None; - } - - let delimiters = language.line_comment_prefixes(); - let max_len_of_delimiter = - delimiters.iter().map(|delimiter| delimiter.len()).max()?; - let (snapshot, range) = - buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; - - let num_of_whitespaces = snapshot - .chars_for_range(range.clone()) - .take_while(|c| c.is_whitespace()) - .count(); - let comment_candidate = snapshot - .chars_for_range(range.clone()) - .skip(num_of_whitespaces) - .take(max_len_of_delimiter) - .collect::(); - let (delimiter, trimmed_len) = delimiters - .iter() - .filter_map(|delimiter| { - let prefix = delimiter.trim_end(); - if comment_candidate.starts_with(prefix) { - Some((delimiter, prefix.len())) - } else { - None - } - }) - .max_by_key(|(_, len)| *len)?; - - if let Some(BlockCommentConfig { - start: block_start, .. 
- }) = language.block_comment() - { - let block_start_trimmed = block_start.trim_end(); - if block_start_trimmed.starts_with(delimiter.trim_end()) { - let line_content = snapshot - .chars_for_range(range) - .skip(num_of_whitespaces) - .take(block_start_trimmed.len()) - .collect::(); - - if line_content.starts_with(block_start_trimmed) { - return None; - } + let (comment_delimiter, doc_delimiter, newline_formatting) = + if let Some(language) = &language_scope { + let mut newline_formatting = + NewlineFormatting::new(&buffer, start..end, language); + + // Comment extension on newline is allowed only for cursor selections + let comment_delimiter = maybe!({ + if !selection_is_empty { + return None; } - } - - let cursor_is_placed_after_comment_marker = - num_of_whitespaces + trimmed_len <= start_point.column as usize; - if cursor_is_placed_after_comment_marker { - Some(delimiter.clone()) - } else { - None - } - }); - - let mut indent_on_newline = IndentSize::spaces(0); - let mut indent_on_extra_newline = IndentSize::spaces(0); - - let doc_delimiter = maybe!({ - if !selection_is_empty { - return None; - } - - if !multi_buffer.language_settings(cx).extend_comment_on_newline { - return None; - } - let BlockCommentConfig { - start: start_tag, - end: end_tag, - prefix: delimiter, - tab_size: len, - } = language.documentation_comment()?; - let is_within_block_comment = buffer - .language_scope_at(start_point) - .is_some_and(|scope| scope.override_name() == Some("comment")); - if !is_within_block_comment { - return None; - } - - let (snapshot, range) = - buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; - - let num_of_whitespaces = snapshot - .chars_for_range(range.clone()) - .take_while(|c| c.is_whitespace()) - .count(); - - // It is safe to use a column from MultiBufferPoint in context of a single buffer ranges, because we're only ever looking at a single line at a time. 
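// Illustrative sketch only (not Zed's implementation): the hunk above removes this inline
// cursor-position bookkeeping in favor of the `comment_delimiter_for_newline` /
// `documentation_delimiter_for_newline` helpers introduced on the `+` side. Assuming plain
// &str lines and char-based columns, the helper below shows the core check being factored
// out: a comment prefix is only carried onto the new line when the cursor sits at or after
// the end of that prefix.
fn delimiter_to_extend<'a>(
    line: &str,
    cursor_column: usize,
    delimiters: &'a [&'a str],
) -> Option<&'a str> {
    // Count leading whitespace characters, mirroring `num_of_whitespaces` above.
    let leading_ws = line.chars().take_while(|c| c.is_whitespace()).count();
    let rest: String = line.chars().skip(leading_ws).collect();

    delimiters
        .iter()
        .filter_map(|delimiter| {
            let prefix = delimiter.trim_end();
            rest.starts_with(prefix).then_some((*delimiter, prefix.len()))
        })
        // Prefer the longest matching prefix, like the `max_by_key` in the real code.
        .max_by_key(|(_, len)| *len)
        // Only extend when the cursor is placed after the comment marker.
        .and_then(|(delimiter, len)| (leading_ws + len <= cursor_column).then_some(delimiter))
}

#[test]
fn extends_only_after_marker() {
    let delims = ["// ", "/// "];
    assert_eq!(delimiter_to_extend("    /// docs", 8, &delims), Some("/// "));
    assert_eq!(delimiter_to_extend("    /// docs", 5, &delims), None);
    assert_eq!(delimiter_to_extend("let x = 1;", 4, &delims), None);
}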
- let column = start_point.column; - let cursor_is_after_start_tag = { - let start_tag_len = start_tag.len(); - let start_tag_line = snapshot - .chars_for_range(range.clone()) - .skip(num_of_whitespaces) - .take(start_tag_len) - .collect::(); - if start_tag_line.starts_with(start_tag.as_ref()) { - num_of_whitespaces + start_tag_len <= column as usize - } else { - false + if !multi_buffer.language_settings(cx).extend_comment_on_newline + { + return None; } - }; - let cursor_is_after_delimiter = { - let delimiter_trim = delimiter.trim_end(); - let delimiter_line = snapshot - .chars_for_range(range.clone()) - .skip(num_of_whitespaces) - .take(delimiter_trim.len()) - .collect::(); - if delimiter_line.starts_with(delimiter_trim) { - num_of_whitespaces + delimiter_trim.len() <= column as usize - } else { - false - } - }; + return comment_delimiter_for_newline( + &start_point, + &buffer, + language, + ); + }); - let cursor_is_before_end_tag_if_exists = { - let mut char_position = 0u32; - let mut end_tag_offset = None; - - 'outer: for chunk in snapshot.text_for_range(range) { - if let Some(byte_pos) = chunk.find(&**end_tag) { - let chars_before_match = - chunk[..byte_pos].chars().count() as u32; - end_tag_offset = - Some(char_position + chars_before_match); - break 'outer; - } - char_position += chunk.chars().count() as u32; + let doc_delimiter = maybe!({ + if !selection_is_empty { + return None; } - if let Some(end_tag_offset) = end_tag_offset { - let cursor_is_before_end_tag = column <= end_tag_offset; - if cursor_is_after_start_tag { - if cursor_is_before_end_tag { - insert_extra_newline = true; - } - let cursor_is_at_start_of_end_tag = - column == end_tag_offset; - if cursor_is_at_start_of_end_tag { - indent_on_extra_newline.len = *len; - } - } - cursor_is_before_end_tag - } else { - true + if !multi_buffer.language_settings(cx).extend_comment_on_newline + { + return None; } - }; - if (cursor_is_after_start_tag || cursor_is_after_delimiter) - && cursor_is_before_end_tag_if_exists - { - if cursor_is_after_start_tag { - indent_on_newline.len = *len; - } - Some(delimiter.clone()) - } else { - None - } - }); + return documentation_delimiter_for_newline( + &start_point, + &buffer, + language, + &mut newline_formatting, + ); + }); - ( - comment_delimiter, - doc_delimiter, - insert_extra_newline, - indent_on_newline, - indent_on_extra_newline, - ) - } else { - ( - None, - None, - false, - IndentSize::default(), - IndentSize::default(), - ) - }; + (comment_delimiter, doc_delimiter, newline_formatting) + } else { + (None, None, NewlineFormatting::default()) + }; let prevent_auto_indent = doc_delimiter.is_some(); let delimiter = comment_delimiter.or(doc_delimiter); @@ -4856,28 +4841,28 @@ impl Editor { let mut new_text = String::with_capacity( 1 + capacity_for_delimiter + existing_indent.len as usize - + indent_on_newline.len as usize - + indent_on_extra_newline.len as usize, + + newline_formatting.indent_on_newline.len as usize + + newline_formatting.indent_on_extra_newline.len as usize, ); new_text.push('\n'); new_text.extend(existing_indent.chars()); - new_text.extend(indent_on_newline.chars()); + new_text.extend(newline_formatting.indent_on_newline.chars()); if let Some(delimiter) = &delimiter { new_text.push_str(delimiter); } - if insert_extra_newline { + if newline_formatting.insert_extra_newline { new_text.push('\n'); new_text.extend(existing_indent.chars()); - new_text.extend(indent_on_extra_newline.chars()); + new_text.extend(newline_formatting.indent_on_extra_newline.chars()); } let anchor = 
buffer.anchor_after(end); let new_selection = selection.map(|_| anchor); ( ((start..end, new_text), prevent_auto_indent), - (insert_extra_newline, new_selection), + (newline_formatting.insert_extra_newline, new_selection), ) }) .unzip() @@ -4914,6 +4899,9 @@ impl Editor { this.change_selections(Default::default(), window, cx, |s| s.select(new_selections)); this.refresh_edit_prediction(true, false, window, cx); + if let Some(task) = this.trigger_on_type_formatting("\n".to_owned(), window, cx) { + task.detach_and_log_err(cx); + } }); } @@ -4978,6 +4966,9 @@ impl Editor { } } editor.edit(indent_edits, cx); + if let Some(format) = editor.trigger_on_type_formatting("\n".to_owned(), window, cx) { + format.detach_and_log_err(cx); + } }); } @@ -5040,6 +5031,9 @@ impl Editor { } } editor.edit(indent_edits, cx); + if let Some(format) = editor.trigger_on_type_formatting("\n".to_owned(), window, cx) { + format.detach_and_log_err(cx); + } }); } @@ -5134,7 +5128,9 @@ impl Editor { /// If any empty selections is touching the start of its innermost containing autoclose /// region, expand it to select the brackets. fn select_autoclose_pair(&mut self, window: &mut Window, cx: &mut Context) { - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let new_selections = self .selections_with_autoclose_regions(selections, &buffer) @@ -5145,7 +5141,7 @@ impl Editor { if let Some(region) = region { let mut range = region.range.to_offset(&buffer); - if selection.start == range.start && range.start >= region.pair.start.len() { + if selection.start == range.start && range.start.0 >= region.pair.start.len() { range.start -= region.pair.start.len(); if buffer.contains_str_at(range.start, ®ion.pair.start) && buffer.contains_str_at(range.end, ®ion.pair.end) @@ -5176,7 +5172,7 @@ impl Editor { if buffer.contains_str_at(selection.start, &pair.end) { let pair_start_len = pair.start.len(); if buffer.contains_str_at( - selection.start.saturating_sub(pair_start_len), + selection.start.saturating_sub_usize(pair_start_len), &pair.start, ) { selection.start -= pair_start_len; @@ -5277,12 +5273,10 @@ impl Editor { pub fn visible_excerpts( &self, + lsp_related_only: bool, cx: &mut Context, ) -> HashMap, clock::Global, Range)> { - let Some(project) = self.project() else { - return HashMap::default(); - }; - let project = project.read(cx); + let project = self.project().cloned(); let multi_buffer = self.buffer().read(cx); let multi_buffer_snapshot = multi_buffer.snapshot(cx); let multi_buffer_visible_start = self @@ -5300,6 +5294,18 @@ impl Editor { .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { + if !lsp_related_only { + return Some(( + excerpt_id, + ( + multi_buffer.buffer(buffer.remote_id()).unwrap(), + buffer.version().clone(), + excerpt_visible_range.start.0..excerpt_visible_range.end.0, + ), + )); + } + + let project = project.as_ref()?.read(cx); let buffer_file = project::File::from_dyn(buffer.file())?; let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; let worktree_entry = buffer_worktree @@ -5313,7 +5319,7 @@ impl Editor { ( multi_buffer.buffer(buffer.remote_id()).unwrap(), buffer.version().clone(), - excerpt_visible_range, + excerpt_visible_range.start.0..excerpt_visible_range.end.0, ), )) } @@ -5338,7 +5344,7 @@ impl Editor { window: &mut Window, cx: &mut 
Context, ) -> Option>> { - if input.len() != 1 { + if input.chars().count() != 1 { return None; } @@ -5456,6 +5462,7 @@ impl Editor { } let buffer_position = multibuffer_snapshot.anchor_before(position); let Some(buffer) = buffer_position + .text_anchor .buffer_id .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) else { @@ -5463,6 +5470,25 @@ impl Editor { }; let buffer_snapshot = buffer.read(cx).snapshot(); + let menu_is_open = matches!( + self.context_menu.borrow().as_ref(), + Some(CodeContextMenu::Completions(_)) + ); + + let language = buffer_snapshot + .language_at(buffer_position.text_anchor) + .map(|language| language.name()); + + let language_settings = language_settings(language.clone(), buffer_snapshot.file(), cx); + let completion_settings = language_settings.completions.clone(); + + let show_completions_on_input = self + .show_completions_on_input_override + .unwrap_or(language_settings.show_completions_on_input); + if !menu_is_open && trigger.is_some() && !show_completions_on_input { + return; + } + let query: Option> = Self::completion_query(&multibuffer_snapshot, buffer_position) .map(|query| query.into()); @@ -5471,14 +5497,8 @@ impl Editor { // Hide the current completions menu when query is empty. Without this, cached // completions from before the trigger char may be reused (#32774). - if query.is_none() { - let menu_is_open = matches!( - self.context_menu.borrow().as_ref(), - Some(CodeContextMenu::Completions(_)) - ); - if menu_is_open { - self.hide_context_menu(window, cx); - } + if query.is_none() && menu_is_open { + self.hide_context_menu(window, cx); } let mut ignore_word_threshold = false; @@ -5510,7 +5530,14 @@ impl Editor { if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() { if filter_completions { - menu.filter(query.clone(), provider.clone(), window, cx); + menu.filter( + query.clone().unwrap_or_default(), + buffer_position.text_anchor, + &buffer, + provider.clone(), + window, + cx, + ); } // When `is_incomplete` is false, no need to re-query completions when the current query // is a suffix of the initial query. @@ -5519,7 +5546,7 @@ impl Editor { // If the new query is a suffix of the old query (typing more characters) and // the previous result was complete, the existing completions can be filtered. // - // Note that this is always true for snippet completions. + // Note that snippet completions are always complete. 
let query_matches = match (&menu.initial_query, &query) { (Some(initial_query), Some(query)) => query.starts_with(initial_query.as_ref()), (None, _) => true, @@ -5560,14 +5587,6 @@ impl Editor { (buffer_position..buffer_position, None) }; - let language = buffer_snapshot - .language_at(buffer_position) - .map(|language| language.name()); - - let completion_settings = language_settings(language.clone(), buffer_snapshot.file(), cx) - .completions - .clone(); - let show_completion_documentation = buffer_snapshot .settings_at(buffer_position, cx) .show_completion_documentation; @@ -5598,7 +5617,6 @@ impl Editor { position.text_anchor, trigger, trigger_in_words, - completions_source.is_some(), cx, ) }) @@ -5649,12 +5667,15 @@ impl Editor { }; let mut words = if load_word_completions { - cx.background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, - range: word_search_range, - skip_digits, - }) + cx.background_spawn({ + let buffer_snapshot = buffer_snapshot.clone(); + async move { + buffer_snapshot.words_in_range(WordsQuery { + fuzzy_contents: None, + range: word_search_range, + skip_digits, + }) + } }) } else { Task::ready(BTreeMap::default()) @@ -5664,8 +5685,11 @@ impl Editor { && provider.show_snippets() && let Some(project) = self.project() { + let char_classifier = buffer_snapshot + .char_classifier_at(buffer_position) + .scope_context(Some(CharScopeContext::Completion)); project.update(cx, |project, cx| { - snippet_completions(project, &buffer, buffer_position, cx) + snippet_completions(project, &buffer, buffer_position, char_classifier, cx) }) } else { Task::ready(Ok(CompletionResponse { @@ -5720,6 +5744,8 @@ impl Editor { replace_range: word_replace_range.clone(), new_text: word.clone(), label: CodeLabel::plain(word, None), + match_start: None, + snippet_deduplication_key: None, icon_path: None, documentation: None, source: CompletionSource::BufferWord { @@ -5760,6 +5786,11 @@ impl Editor { is_incomplete, buffer.clone(), completions.into(), + editor + .context_menu() + .borrow_mut() + .as_ref() + .map(|menu| menu.primary_scroll_handle()), display_options, snippet_sort_order, languages, @@ -5768,13 +5799,14 @@ impl Editor { ); let query = if filter_completions { query } else { None }; - let matches_task = if let Some(query) = query { - menu.do_async_filtering(query, cx) - } else { - Task::ready(menu.unfiltered_matches()) - }; - (menu, matches_task) - }) else { + let matches_task = menu.do_async_filtering( + query.unwrap_or_default(), + buffer_position, + &buffer, + cx, + ); + (menu, matches_task) + }) else { return; }; @@ -5789,7 +5821,7 @@ impl Editor { return; }; - // Only valid to take prev_menu because it the new menu is immediately set + // Only valid to take prev_menu because either the new menu is immediately set // below, or the menu is hidden. 
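// Illustrative sketch only, not the editor's code path: the surrounding hunk filters the
// open completions menu and then decides whether a fresh provider request is needed.
// Assumed simplification: `is_incomplete` mirrors the cached response's LSP `isIncomplete`
// flag, and queries are plain strings rather than fuzzy-match queries.
#[derive(Debug, PartialEq)]
enum CompletionRefresh {
    /// The cached completions can simply be re-filtered against the new query.
    FilterExisting,
    /// The provider must be queried again.
    Requery,
}

fn completion_refresh(
    is_incomplete: bool,
    initial_query: Option<&str>,
    new_query: Option<&str>,
) -> CompletionRefresh {
    // An incomplete response can never be trusted to contain all matches.
    if is_incomplete {
        return CompletionRefresh::Requery;
    }
    match (initial_query, new_query) {
        // Typing more characters onto the original query only narrows the result set.
        (Some(initial), Some(new)) if new.starts_with(initial) => {
            CompletionRefresh::FilterExisting
        }
        // No initial query: everything was fetched, so filtering is enough.
        (None, _) => CompletionRefresh::FilterExisting,
        // Deleting back past the original query (or an unrelated query) needs fresh data.
        _ => CompletionRefresh::Requery,
    }
}

#[test]
fn narrows_without_requery() {
    assert_eq!(
        completion_refresh(false, Some("fo"), Some("foo")),
        CompletionRefresh::FilterExisting
    );
    assert_eq!(
        completion_refresh(false, Some("foo"), Some("fo")),
        CompletionRefresh::Requery
    );
    assert_eq!(
        completion_refresh(true, Some("fo"), Some("foo")),
        CompletionRefresh::Requery
    );
}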
if let Some(CodeContextMenu::Completions(prev_menu)) = editor.context_menu.borrow_mut().take() @@ -5980,14 +6012,17 @@ impl Editor { .start .text_anchor .to_offset(buffer) - .saturating_sub(replace_range.start); + .saturating_sub(replace_range.start.0); let lookahead = replace_range .end + .0 .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer)); let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; let suffix = &old_text[lookbehind.min(old_text.len())..]; - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); let mut ranges = Vec::new(); let mut linked_edits = HashMap::<_, Vec<_>>::default(); @@ -5998,8 +6033,8 @@ impl Editor { let mut range = selection.range(); // if prefix is present, don't duplicate it - if snapshot.contains_str_at(range.start.saturating_sub(lookbehind), prefix) { - range.start = range.start.saturating_sub(lookbehind); + if snapshot.contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) { + range.start = range.start.saturating_sub_usize(lookbehind); // if suffix is also present, mimic the newest cursor and replace it if selection.id != newest_anchor.id @@ -6086,9 +6121,43 @@ impl Editor { } let provider = self.completion_provider.as_ref()?; + + let lsp_store = self.project().map(|project| project.read(cx).lsp_store()); + let command = lsp_store.as_ref().and_then(|lsp_store| { + let CompletionSource::Lsp { + lsp_completion, + server_id, + .. + } = &completion.source + else { + return None; + }; + let lsp_command = lsp_completion.command.as_ref()?; + let available_commands = lsp_store + .read(cx) + .lsp_server_capabilities + .get(server_id) + .and_then(|server_capabilities| { + server_capabilities + .execute_command_provider + .as_ref() + .map(|options| options.commands.as_slice()) + })?; + if available_commands.contains(&lsp_command.command) { + Some(CodeAction { + server_id: *server_id, + range: language::Anchor::MIN..language::Anchor::MIN, + lsp_action: LspAction::Command(lsp_command.clone()), + resolved: false, + }) + } else { + None + } + }); + drop(completion); let apply_edits = provider.apply_additional_edits_for_completion( - buffer_handle, + buffer_handle.clone(), completions_menu.completions.clone(), candidate_id, true, @@ -6102,8 +6171,29 @@ impl Editor { self.show_signature_help(&ShowSignatureHelp, window, cx); } - Some(cx.foreground_executor().spawn(async move { + Some(cx.spawn_in(window, async move |editor, cx| { apply_edits.await?; + + if let Some((lsp_store, command)) = lsp_store.zip(command) { + let title = command.lsp_action.title().to_owned(); + let project_transaction = lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.apply_code_action(buffer_handle, command, false, cx) + })? + .await + .context("applying post-completion command")?; + if let Some(workspace) = editor.read_with(cx, |editor, _| editor.workspace())? 
{ + Self::open_project_transaction( + &editor, + workspace.downgrade(), + project_transaction, + title, + cx, + ) + .await?; + } + } + Ok(()) })) } @@ -6413,6 +6503,52 @@ impl Editor { } } + fn open_transaction_for_hidden_buffers( + workspace: Entity, + transaction: ProjectTransaction, + title: String, + window: &mut Window, + cx: &mut Context, + ) { + if transaction.0.is_empty() { + return; + } + + let edited_buffers_already_open = { + let other_editors: Vec> = workspace + .read(cx) + .panes() + .iter() + .flat_map(|pane| pane.read(cx).items_of_type::()) + .filter(|editor| editor.entity_id() != cx.entity_id()) + .collect(); + + transaction.0.keys().all(|buffer| { + other_editors.iter().any(|editor| { + let multi_buffer = editor.read(cx).buffer(); + multi_buffer.read(cx).is_singleton() + && multi_buffer + .read(cx) + .as_singleton() + .map_or(false, |singleton| { + singleton.entity_id() == buffer.entity_id() + }) + }) + }) + }; + if !edited_buffers_already_open { + let workspace = workspace.downgrade(); + cx.defer_in(window, move |_, window, cx| { + cx.spawn_in(window, async move |editor, cx| { + Self::open_project_transaction(&editor, workspace, transaction, title, cx) + .await + .ok() + }) + .detach(); + }); + } + } + pub async fn open_project_transaction( editor: &WeakEntity, workspace: WeakEntity, @@ -6488,7 +6624,7 @@ impl Editor { editor.update(cx, |editor, cx| { editor.highlight_background::( &ranges_to_highlight, - |theme| theme.colors().editor_highlighted_line_background, + |_, theme| theme.colors().editor_highlighted_line_background, cx, ); }); @@ -6572,7 +6708,7 @@ impl Editor { }) }) .on_click(cx.listener(move |editor, _: &ClickEvent, window, cx| { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.toggle_code_actions( &crate::actions::ToggleCodeActions { deployed_from: Some(crate::actions::CodeActionSource::Indicator( @@ -6689,6 +6825,9 @@ impl Editor { return; }; + if self.blame.is_none() { + self.start_git_blame(true, window, cx); + } let Some(blame) = self.blame.as_ref() else { return; }; @@ -6706,7 +6845,7 @@ impl Editor { }; let anchor = self.selections.newest_anchor().head(); - let position = self.to_pixel_point(anchor, &snapshot, window); + let position = self.to_pixel_point(anchor, &snapshot, window, cx); if let (Some(position), Some(last_bounds)) = (position, self.last_bounds) { self.show_blame_popover( buffer, @@ -6775,7 +6914,11 @@ impl Editor { } } - fn hide_blame_popover(&mut self, ignore_timeout: bool, cx: &mut Context) -> bool { + pub fn has_mouse_context_menu(&self) -> bool { + self.mouse_context_menu.is_some() + } + + pub fn hide_blame_popover(&mut self, ignore_timeout: bool, cx: &mut Context) -> bool { self.inline_blame_popover_show_task.take(); if let Some(state) = &mut self.inline_blame_popover { let hide_task = cx.spawn(async move |editor, cx| { @@ -6876,8 +7019,7 @@ impl Editor { continue; } - let range = - Anchor::range_in_buffer(excerpt_id, buffer_id, *start..*end); + let range = Anchor::range_in_buffer(excerpt_id, *start..*end); if highlight.kind == lsp::DocumentHighlightKind::WRITE { write_ranges.push(range); } else { @@ -6888,12 +7030,12 @@ impl Editor { this.highlight_background::( &read_ranges, - |theme| theme.colors().editor_document_highlight_read_background, + |_, theme| theme.colors().editor_document_highlight_read_background, cx, ); this.highlight_background::( &write_ranges, - |theme| theme.colors().editor_document_highlight_write_background, + |_, theme| 
theme.colors().editor_document_highlight_write_background, cx, ); cx.notify(); @@ -6937,6 +7079,7 @@ impl Editor { Some((query, selection_anchor_range)) } + #[ztracing::instrument(skip_all)] fn update_selection_occurrence_highlights( &mut self, query_text: String, @@ -6975,7 +7118,10 @@ impl Editor { for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { match_ranges.extend( regex - .search(buffer_snapshot, Some(search_range.clone())) + .search( + buffer_snapshot, + Some(search_range.start.0..search_range.end.0), + ) .await .into_iter() .filter_map(|match_range| { @@ -6983,11 +7129,8 @@ impl Editor { .anchor_after(search_range.start + match_range.start); let match_end = buffer_snapshot .anchor_before(search_range.start + match_range.end); - let match_anchor_range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.remote_id(), - match_start..match_end, - ); + let match_anchor_range = + Anchor::range_in_buffer(excerpt_id, match_start..match_end); (match_anchor_range != query_range).then_some(match_anchor_range) }), ); @@ -7001,7 +7144,7 @@ impl Editor { if !match_ranges.is_empty() { editor.highlight_background::( &match_ranges, - |theme| theme.colors().editor_document_highlight_bracket_background, + |_, theme| theme.colors().editor_document_highlight_bracket_background, cx, ) } @@ -7022,12 +7165,12 @@ impl Editor { } let task = cx.background_spawn(async move { let new_newlines = snapshot - .buffer_chars_at(0) + .buffer_chars_at(MultiBufferOffset(0)) .filter_map(|(c, i)| { if c == '\n' { Some( snapshot.buffer_snapshot().anchor_after(i) - ..snapshot.buffer_snapshot().anchor_before(i + 1), + ..snapshot.buffer_snapshot().anchor_before(i + 1usize), ) } else { None @@ -7035,7 +7178,7 @@ impl Editor { }) .collect::>(); let existing_newlines = snapshot - .folds_in_range(0..snapshot.buffer_snapshot().len()) + .folds_in_range(MultiBufferOffset(0)..snapshot.buffer_snapshot().len()) .filter_map(|fold| { if fold.placeholder.type_tag == Some(type_id) { Some(fold.range.start..fold.range.end) @@ -7081,6 +7224,7 @@ impl Editor { }); } + #[ztracing::instrument(skip_all)] fn refresh_selected_text_highlights( &mut self, on_buffer_edit: bool, @@ -7132,7 +7276,7 @@ impl Editor { .is_none_or(|(prev_anchor_range, _)| prev_anchor_range != &query_range) { let multi_buffer_start = multi_buffer_snapshot - .anchor_before(0) + .anchor_before(MultiBufferOffset(0)) .to_point(&multi_buffer_snapshot); let multi_buffer_end = multi_buffer_snapshot .anchor_after(multi_buffer_snapshot.len()) @@ -7259,7 +7403,7 @@ impl Editor { && self .edit_prediction_provider .as_ref() - .is_some_and(|provider| provider.provider.show_completions_in_menu()); + .is_some_and(|provider| provider.provider.show_predictions_in_menu()); let preview_requires_modifier = all_language_settings(file, cx).edit_predictions_mode() == EditPredictionsMode::Subtle; @@ -7320,26 +7464,6 @@ impl Editor { .unwrap_or(false) } - fn cycle_edit_prediction( - &mut self, - direction: Direction, - window: &mut Window, - cx: &mut Context, - ) -> Option<()> { - let provider = self.edit_prediction_provider()?; - let cursor = self.selections.newest_anchor().head(); - let (buffer, cursor_buffer_position) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - if self.edit_predictions_hidden_for_vim_mode || !self.should_show_edit_predictions() { - return None; - } - - provider.cycle(buffer, cursor_buffer_position, direction, cx); - self.update_visible_edit_prediction(window, cx); - - Some(()) - } - pub fn show_edit_prediction( &mut self, _: 
&ShowEditPrediction, @@ -7377,45 +7501,9 @@ impl Editor { .detach(); } - pub fn next_edit_prediction( - &mut self, - _: &NextEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.has_active_edit_prediction() { - self.cycle_edit_prediction(Direction::Next, window, cx); - } else { - let is_copilot_disabled = self - .refresh_edit_prediction(false, true, window, cx) - .is_none(); - if is_copilot_disabled { - cx.propagate(); - } - } - } - - pub fn previous_edit_prediction( - &mut self, - _: &PreviousEditPrediction, - window: &mut Window, - cx: &mut Context, - ) { - if self.has_active_edit_prediction() { - self.cycle_edit_prediction(Direction::Prev, window, cx); - } else { - let is_copilot_disabled = self - .refresh_edit_prediction(false, true, window, cx) - .is_none(); - if is_copilot_disabled { - cx.propagate(); - } - } - } - - pub fn accept_edit_prediction( + pub fn accept_partial_edit_prediction( &mut self, - _: &AcceptEditPrediction, + granularity: EditPredictionGranularity, window: &mut Window, cx: &mut Context, ) { @@ -7427,47 +7515,59 @@ impl Editor { return; }; + if !matches!(granularity, EditPredictionGranularity::Full) && self.selections.count() != 1 { + return; + } + match &active_edit_prediction.completion { EditPrediction::MoveWithin { target, .. } => { let target = *target; - if let Some(position_map) = &self.last_position_map { - if position_map - .visible_row_range - .contains(&target.to_display_point(&position_map.snapshot).row()) - || !self.edit_prediction_requires_modifier() - { - self.unfold_ranges(&[target..target], true, false, cx); - // Note that this is also done in vim's handler of the Tab action. - self.change_selections( - SelectionEffects::scroll(Autoscroll::newest()), - window, - cx, - |selections| { - selections.select_anchor_ranges([target..target]); - }, - ); - self.clear_row_highlights::(); + if matches!(granularity, EditPredictionGranularity::Full) { + if let Some(position_map) = &self.last_position_map { + let target_row = target.to_display_point(&position_map.snapshot).row(); + let is_visible = position_map.visible_row_range.contains(&target_row); - self.edit_prediction_preview - .set_previous_scroll_position(None); - } else { - self.edit_prediction_preview - .set_previous_scroll_position(Some( - position_map.snapshot.scroll_anchor, - )); - - self.highlight_rows::( - target..target, - cx.theme().colors().editor_highlighted_line_background, - RowHighlightOptions { - autoscroll: true, - ..Default::default() - }, - cx, - ); - self.request_autoscroll(Autoscroll::fit(), cx); + if is_visible || !self.edit_prediction_requires_modifier() { + self.unfold_ranges(&[target..target], true, false, cx); + self.change_selections( + SelectionEffects::scroll(Autoscroll::newest()), + window, + cx, + |selections| { + selections.select_anchor_ranges([target..target]); + }, + ); + self.clear_row_highlights::(); + self.edit_prediction_preview + .set_previous_scroll_position(None); + } else { + // Highlight and request scroll + self.edit_prediction_preview + .set_previous_scroll_position(Some( + position_map.snapshot.scroll_anchor, + )); + self.highlight_rows::( + target..target, + cx.theme().colors().editor_highlighted_line_background, + RowHighlightOptions { + autoscroll: true, + ..Default::default() + }, + cx, + ); + self.request_autoscroll(Autoscroll::fit(), cx); + } } + } else { + self.change_selections( + SelectionEffects::scroll(Autoscroll::newest()), + window, + cx, + |selections| { + selections.select_anchor_ranges([target..target]); + }, + ); } } 
EditPrediction::MoveOutside { snapshot, target } => { @@ -7483,126 +7583,131 @@ impl Editor { cx, ); - if let Some(provider) = self.edit_prediction_provider() { - provider.accept(cx); - } + match granularity { + EditPredictionGranularity::Full => { + if let Some(provider) = self.edit_prediction_provider() { + provider.accept(cx); + } - // Store the transaction ID and selections before applying the edit - let transaction_id_prev = self.buffer.read(cx).last_transaction_id(cx); + let transaction_id_prev = self.buffer.read(cx).last_transaction_id(cx); + let snapshot = self.buffer.read(cx).snapshot(cx); + let last_edit_end = edits.last().unwrap().0.end.bias_right(&snapshot); - let snapshot = self.buffer.read(cx).snapshot(cx); - let last_edit_end = edits.last().unwrap().0.end.bias_right(&snapshot); + self.buffer.update(cx, |buffer, cx| { + buffer.edit(edits.iter().cloned(), None, cx) + }); - self.buffer.update(cx, |buffer, cx| { - buffer.edit(edits.iter().cloned(), None, cx) - }); + self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_anchor_ranges([last_edit_end..last_edit_end]); + }); - self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_anchor_ranges([last_edit_end..last_edit_end]); - }); + let selections = self.selections.disjoint_anchors_arc(); + if let Some(transaction_id_now) = + self.buffer.read(cx).last_transaction_id(cx) + { + if transaction_id_prev != Some(transaction_id_now) { + self.selection_history + .insert_transaction(transaction_id_now, selections); + } + } - let selections = self.selections.disjoint_anchors_arc(); - if let Some(transaction_id_now) = self.buffer.read(cx).last_transaction_id(cx) { - let has_new_transaction = transaction_id_prev != Some(transaction_id_now); - if has_new_transaction { - self.selection_history - .insert_transaction(transaction_id_now, selections); + self.update_visible_edit_prediction(window, cx); + if self.active_edit_prediction.is_none() { + self.refresh_edit_prediction(true, true, window, cx); + } + cx.notify(); } - } + _ => { + let snapshot = self.buffer.read(cx).snapshot(cx); + let cursor_offset = self + .selections + .newest::(&self.display_snapshot(cx)) + .head(); + + let insertion = edits.iter().find_map(|(range, text)| { + let range = range.to_offset(&snapshot); + if range.is_empty() && range.start == cursor_offset { + Some(text) + } else { + None + } + }); - self.update_visible_edit_prediction(window, cx); - if self.active_edit_prediction.is_none() { - self.refresh_edit_prediction(true, true, window, cx); - } + if let Some(text) = insertion { + let text_to_insert = match granularity { + EditPredictionGranularity::Word => { + let mut partial = text + .chars() + .by_ref() + .take_while(|c| c.is_alphabetic()) + .collect::(); + if partial.is_empty() { + partial = text + .chars() + .by_ref() + .take_while(|c| c.is_whitespace() || !c.is_alphabetic()) + .collect::(); + } + partial + } + EditPredictionGranularity::Line => { + if let Some(line) = text.split_inclusive('\n').next() { + line.to_string() + } else { + text.to_string() + } + } + EditPredictionGranularity::Full => unreachable!(), + }; - cx.notify(); + cx.emit(EditorEvent::InputHandled { + utf16_range_to_replace: None, + text: text_to_insert.clone().into(), + }); + + self.insert_with_autoindent_mode(&text_to_insert, None, window, cx); + self.refresh_edit_prediction(true, true, window, cx); + cx.notify(); + } else { + self.accept_partial_edit_prediction( + EditPredictionGranularity::Full, + window, + cx, + ); + } + } + } } } 
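
The `Edit` arm above derives the partially accepted text from the full prediction: the leading word (or, failing that, the leading run of whitespace and punctuation) for `Word`, and the first line including its newline for `Line`. A small sketch of that splitting as a free function; `EditPredictionGranularity` mirrors the diff, the rest is illustrative rather than Zed's implementation:

```rust
#[derive(Copy, Clone, Debug)]
enum EditPredictionGranularity {
    Word,
    Line,
    Full,
}

fn partial_insertion(text: &str, granularity: EditPredictionGranularity) -> String {
    match granularity {
        EditPredictionGranularity::Word => {
            // Take the leading alphabetic run; if the prediction starts with
            // whitespace or punctuation, take that run instead.
            let alphabetic: String = text.chars().take_while(|c| c.is_alphabetic()).collect();
            if !alphabetic.is_empty() {
                alphabetic
            } else {
                text.chars()
                    .take_while(|c| c.is_whitespace() || !c.is_alphabetic())
                    .collect()
            }
        }
        EditPredictionGranularity::Line => text
            .split_inclusive('\n')
            .next()
            .unwrap_or(text)
            .to_string(),
        // The real code handles `Full` elsewhere; returning the whole text keeps the
        // sketch total.
        EditPredictionGranularity::Full => text.to_string(),
    }
}

fn main() {
    assert_eq!(
        partial_insertion("foo_bar(baz)", EditPredictionGranularity::Word),
        "foo"
    );
    assert_eq!(
        partial_insertion("    .await?", EditPredictionGranularity::Word),
        "    ."
    );
    assert_eq!(
        partial_insertion("let x = 1;\nlet y = 2;", EditPredictionGranularity::Line),
        "let x = 1;\n"
    );
}
```
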
self.edit_prediction_requires_modifier_in_indent_conflict = false; } - pub fn accept_partial_edit_prediction( + pub fn accept_next_word_edit_prediction( &mut self, - _: &AcceptPartialEditPrediction, + _: &AcceptNextWordEditPrediction, window: &mut Window, cx: &mut Context, ) { - let Some(active_edit_prediction) = self.active_edit_prediction.as_ref() else { - return; - }; - if self.selections.count() != 1 { - return; - } - - match &active_edit_prediction.completion { - EditPrediction::MoveWithin { target, .. } => { - let target = *target; - self.change_selections( - SelectionEffects::scroll(Autoscroll::newest()), - window, - cx, - |selections| { - selections.select_anchor_ranges([target..target]); - }, - ); - } - EditPrediction::MoveOutside { snapshot, target } => { - if let Some(workspace) = self.workspace() { - Self::open_editor_at_anchor(snapshot, *target, &workspace, window, cx) - .detach_and_log_err(cx); - } - } - EditPrediction::Edit { edits, .. } => { - self.report_edit_prediction_event( - active_edit_prediction.completion_id.clone(), - true, - cx, - ); - - // Find an insertion that starts at the cursor position. - let snapshot = self.buffer.read(cx).snapshot(cx); - let cursor_offset = self - .selections - .newest::(&self.display_snapshot(cx)) - .head(); - let insertion = edits.iter().find_map(|(range, text)| { - let range = range.to_offset(&snapshot); - if range.is_empty() && range.start == cursor_offset { - Some(text) - } else { - None - } - }); - - if let Some(text) = insertion { - let mut partial_completion = text - .chars() - .by_ref() - .take_while(|c| c.is_alphabetic()) - .collect::(); - if partial_completion.is_empty() { - partial_completion = text - .chars() - .by_ref() - .take_while(|c| c.is_whitespace() || !c.is_alphabetic()) - .collect::(); - } - - cx.emit(EditorEvent::InputHandled { - utf16_range_to_replace: None, - text: partial_completion.clone().into(), - }); + self.accept_partial_edit_prediction(EditPredictionGranularity::Word, window, cx); + } - self.insert_with_autoindent_mode(&partial_completion, None, window, cx); + pub fn accept_next_line_edit_prediction( + &mut self, + _: &AcceptNextLineEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + self.accept_partial_edit_prediction(EditPredictionGranularity::Line, window, cx); + } - self.refresh_edit_prediction(true, true, window, cx); - cx.notify(); - } else { - self.accept_edit_prediction(&Default::default(), window, cx); - } - } - } + pub fn accept_edit_prediction( + &mut self, + _: &AcceptEditPrediction, + window: &mut Window, + cx: &mut Context, + ) { + self.accept_partial_edit_prediction(EditPredictionGranularity::Full, window, cx); } fn discard_edit_prediction( @@ -7822,21 +7927,23 @@ impl Editor { cx: &mut Context, ) { let mut modifiers_held = false; - if let Some(accept_keystroke) = self - .accept_edit_prediction_keybind(false, window, cx) - .keystroke() - { - modifiers_held = modifiers_held - || (accept_keystroke.modifiers() == modifiers - && accept_keystroke.modifiers().modified()); - }; - if let Some(accept_partial_keystroke) = self - .accept_edit_prediction_keybind(true, window, cx) - .keystroke() - { - modifiers_held = modifiers_held - || (accept_partial_keystroke.modifiers() == modifiers - && accept_partial_keystroke.modifiers().modified()); + + // Check bindings for all granularities. + // If the user holds the key for Word, Line, or Full, we want to show the preview. 
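
Throughout these hunks, raw `usize` offsets give way to typed wrappers: ranges start at `MultiBufferOffset(0)`, arithmetic goes through helpers such as `saturating_sub_usize`, and call sites that still need raw bytes unwrap with `.0`. A minimal sketch of that newtype pattern, assuming nothing about Zed's real `MultiBufferOffset`/`BufferOffset` types beyond the names and operations visible in the diff:

```rust
use std::ops::{Add, Range, Sub};

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct MultiBufferOffset(pub usize);

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct BufferOffset(pub usize);

impl Add<usize> for MultiBufferOffset {
    type Output = Self;
    fn add(self, rhs: usize) -> Self {
        Self(self.0 + rhs)
    }
}

impl Sub for MultiBufferOffset {
    type Output = usize;
    fn sub(self, rhs: Self) -> usize {
        self.0 - rhs.0
    }
}

impl MultiBufferOffset {
    fn saturating_sub_usize(self, rhs: usize) -> Self {
        Self(self.0.saturating_sub(rhs))
    }
}

// APIs that still want raw bytes get the unwrapped value, which is why the diff
// writes ranges like `search_range.start.0..search_range.end.0`.
fn to_raw(range: Range<MultiBufferOffset>) -> Range<usize> {
    range.start.0..range.end.0
}

// Excerpts translate between the two coordinate spaces; this mirrors what the diff's
// `map_offset_to_buffer` appears to do, but the arithmetic here is an assumption.
fn map_to_buffer(
    offset: MultiBufferOffset,
    excerpt_start: MultiBufferOffset,
    excerpt_buffer_start: BufferOffset,
) -> BufferOffset {
    BufferOffset(excerpt_buffer_start.0 + (offset - excerpt_start))
}

fn main() {
    let start = MultiBufferOffset(0);
    let end = start + 42usize;
    assert_eq!(end - start, 42);
    assert_eq!(to_raw(start..end), 0..42);
    assert_eq!(end.saturating_sub_usize(100), MultiBufferOffset(0));
    assert_eq!(
        map_to_buffer(MultiBufferOffset(12), MultiBufferOffset(10), BufferOffset(100)),
        BufferOffset(102)
    );
}
```

The payoff is that mixing multibuffer and buffer offsets becomes a type error instead of a silent off-by-excerpt bug.
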
+ let granularities = [ + EditPredictionGranularity::Full, + EditPredictionGranularity::Line, + EditPredictionGranularity::Word, + ]; + + for granularity in granularities { + if let Some(keystroke) = self + .accept_edit_prediction_keybind(granularity, window, cx) + .keystroke() + { + modifiers_held = modifiers_held + || (keystroke.modifiers() == modifiers && keystroke.modifiers().modified()); + } } if modifiers_held { @@ -7844,6 +7951,10 @@ impl Editor { self.edit_prediction_preview, EditPredictionPreview::Inactive { .. } ) { + if let Some(provider) = self.edit_prediction_provider.as_ref() { + provider.provider.did_show(cx) + } + self.edit_prediction_preview = EditPredictionPreview::Active { previous_scroll_position: None, since: Instant::now(), @@ -7936,10 +8047,17 @@ impl Editor { if self.edit_prediction_indent_conflict { let cursor_point = cursor.to_point(&multibuffer); + let mut suggested_indent = None; + multibuffer.suggested_indents_callback( + cursor_point.row..cursor_point.row + 1, + |_, indent| { + suggested_indent = Some(indent); + ControlFlow::Break(()) + }, + cx, + ); - let indents = multibuffer.suggested_indents(cursor_point.row..cursor_point.row + 1, cx); - - if let Some((_, indent)) = indents.iter().next() + if let Some(indent) = suggested_indent && indent.len == cursor_point.column { self.edit_prediction_indent_conflict = false; @@ -7949,12 +8067,12 @@ impl Editor { let edit_prediction = provider.suggest(&buffer, cursor_buffer_position, cx)?; let (completion_id, edits, edit_preview) = match edit_prediction { - edit_prediction::EditPrediction::Local { + edit_prediction_types::EditPrediction::Local { id, edits, edit_preview, } => (id, edits, edit_preview), - edit_prediction::EditPrediction::Jump { + edit_prediction_types::EditPrediction::Jump { id, snapshot, target, @@ -8023,6 +8141,9 @@ impl Editor { && !self.edit_predictions_hidden_for_vim_mode; if show_completions_in_buffer { + if let Some(provider) = &self.edit_prediction_provider { + provider.provider.did_show(cx); + } if edits .iter() .all(|(range, _)| range.to_offset(&multibuffer).is_empty()) @@ -8092,7 +8213,7 @@ impl Editor { Some(()) } - pub fn edit_prediction_provider(&self) -> Option> { + pub fn edit_prediction_provider(&self) -> Option> { Some(self.edit_prediction_provider.as_ref()?.provider.clone()) } @@ -8155,8 +8276,7 @@ impl Editor { cx, ); for (breakpoint, state) in breakpoints { - let multi_buffer_anchor = - Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), breakpoint.position); + let multi_buffer_anchor = Anchor::in_buffer(excerpt_id, breakpoint.position); let position = multi_buffer_anchor .to_point(&multi_buffer_snapshot) .to_display_point(&snapshot); @@ -8368,8 +8488,14 @@ impl Editor { (true, true) => ui::IconName::DebugDisabledLogBreakpoint, }; + let color = cx.theme().colors(); + let color = if is_phantom { - Color::Hint + if collides_with_existing { + Color::Custom(color.debugger_accent.blend(color.text.opacity(0.6))) + } else { + Color::Hint + } } else if is_rejected { Color::Disabled } else { @@ -8419,7 +8545,7 @@ impl Editor { BreakpointEditAction::Toggle }; - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.edit_breakpoint_at_anchor( position, breakpoint.as_ref().clone(), @@ -8549,16 +8675,17 @@ impl Editor { let snapshot = self.buffer.read(cx).snapshot(cx); let offset = self .selections - .newest::(&self.display_snapshot(cx)) + .newest::(&self.display_snapshot(cx)) .head(); - let excerpt = snapshot.excerpt_containing(offset..offset)?; + let 
mut excerpt = snapshot.excerpt_containing(offset..offset)?; + let offset = excerpt.map_offset_to_buffer(offset); let buffer_id = excerpt.buffer().remote_id(); let layer = excerpt.buffer().syntax_layer_at(offset)?; let mut cursor = layer.node().walk(); - while cursor.goto_first_child_for_byte(offset).is_some() { - if cursor.node().end_byte() == offset { + while cursor.goto_first_child_for_byte(offset.0).is_some() { + if cursor.node().end_byte() == offset.0 { cursor.goto_next_sibling(); } } @@ -8570,7 +8697,7 @@ impl Editor { let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; // Check if this node contains our offset - if node_range.start <= offset && node_range.end >= offset { + if node_range.start <= offset.0 && node_range.end >= offset.0 { // If it contains offset, check for task if let Some(tasks) = self.tasks.get(&(buffer_id, symbol_start_row)) { let buffer = self.buffer.read(cx).buffer(buffer_id)?; @@ -8610,7 +8737,7 @@ impl Editor { ClickEvent::Mouse(e) => e.down.button == MouseButton::Left, }; - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.toggle_code_actions( &ToggleCodeActions { deployed_from: Some(CodeActionSource::RunMenu(row)), @@ -8764,23 +8891,13 @@ impl Editor { cx, ), EditPrediction::MoveOutside { snapshot, .. } => { - let file_name = snapshot - .file() - .map(|file| file.file_name(cx)) - .unwrap_or("untitled"); let mut element = self - .render_edit_prediction_line_popover( - format!("Jump to {file_name}"), - Some(IconName::ZedPredict), - window, - cx, - ) + .render_edit_prediction_jump_outside_popover(snapshot, window, cx) .into_any(); let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let origin_x = text_bounds.size.width / 2. - size.width / 2.; - let origin_y = text_bounds.size.height - size.height - px(30.); - let origin = text_bounds.origin + gpui::Point::new(origin_x, origin_y); + let origin_x = text_bounds.size.width - size.width - px(30.); + let origin = text_bounds.origin + gpui::Point::new(origin_x, px(16.)); element.prepaint_at(origin, window, cx); Some((element, origin)) @@ -9006,7 +9123,8 @@ impl Editor { let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); - let line_origin = self.display_to_pixel_point(target_line_end, editor_snapshot, window)?; + let line_origin = + self.display_to_pixel_point(target_line_end, editor_snapshot, window, cx)?; let start_point = content_origin - point(scroll_pixel_position.x.into(), Pixels::ZERO); let mut origin = start_point @@ -9245,7 +9363,8 @@ impl Editor { window: &mut Window, cx: &mut App, ) -> Option { - let accept_binding = self.accept_edit_prediction_keybind(false, window, cx); + let accept_binding = + self.accept_edit_prediction_keybind(EditPredictionGranularity::Full, window, cx); let accept_keystroke = accept_binding.keystroke()?; let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac; @@ -9345,9 +9464,70 @@ impl Editor { }) } - fn edit_prediction_line_popover_bg_color(cx: &App) -> Hsla { - let accent_color = cx.theme().colors().text_accent; - let editor_bg_color = cx.theme().colors().editor_background; + fn render_edit_prediction_jump_outside_popover( + &self, + snapshot: &BufferSnapshot, + window: &mut Window, + cx: &mut App, + ) -> Stateful
{ + let keybind = self.render_edit_prediction_accept_keybind(window, cx); + let has_keybind = keybind.is_some(); + + let file_name = snapshot + .file() + .map(|file| SharedString::new(file.file_name(cx))) + .unwrap_or(SharedString::new_static("untitled")); + + h_flex() + .id("ep-jump-outside-popover") + .py_1() + .px_2() + .gap_1() + .rounded_md() + .border_1() + .bg(Self::edit_prediction_line_popover_bg_color(cx)) + .border_color(Self::edit_prediction_callout_popover_border_color(cx)) + .shadow_xs() + .when(!has_keybind, |el| { + let status_colors = cx.theme().status(); + + el.bg(status_colors.error_background) + .border_color(status_colors.error.opacity(0.6)) + .pl_2() + .child(Icon::new(IconName::ZedPredictError).color(Color::Error)) + .cursor_default() + .hoverable_tooltip(move |_window, cx| { + cx.new(|_| MissingEditPredictionKeybindingTooltip).into() + }) + }) + .children(keybind) + .child( + Label::new(file_name) + .size(LabelSize::Small) + .buffer_font(cx) + .when(!has_keybind, |el| { + el.color(cx.theme().status().error.into()).strikethrough() + }), + ) + .when(!has_keybind, |el| { + el.child( + h_flex().ml_1().child( + Icon::new(IconName::Info) + .size(IconSize::Small) + .color(cx.theme().status().error.into()), + ), + ) + }) + .child( + div() + .mt(px(1.5)) + .child(Icon::new(IconName::ArrowUpRight).size(IconSize::Small)), + ) + } + + fn edit_prediction_line_popover_bg_color(cx: &App) -> Hsla { + let accent_color = cx.theme().colors().text_accent; + let editor_bg_color = cx.theme().colors().editor_background; editor_bg_color.blend(accent_color.opacity(0.1)) } @@ -9357,7 +9537,7 @@ impl Editor { editor_bg_color.blend(accent_color.opacity(0.6)) } fn get_prediction_provider_icon_name( - provider: &Option, + provider: &Option, ) -> IconName { match provider { Some(provider) => match provider.provider.name() { @@ -9687,8 +9867,7 @@ impl Editor { } pub fn render_context_menu( - &self, - style: &EditorStyle, + &mut self, max_height_in_lines: u32, window: &mut Window, cx: &mut Context, @@ -9698,7 +9877,9 @@ impl Editor { if !menu.visible() { return None; }; - Some(menu.render(style, max_height_in_lines, window, cx)) + self.style + .as_ref() + .map(|style| menu.render(style, max_height_in_lines, window, cx)) } fn render_context_menu_aside( @@ -9758,13 +9939,16 @@ impl Editor { let id = post_inc(&mut self.next_completion_id); let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; - *self.context_menu.borrow_mut() = Some(CodeContextMenu::Completions( + let mut context_menu = self.context_menu.borrow_mut(); + let old_menu = context_menu.take(); + *context_menu = Some(CodeContextMenu::Completions( CompletionsMenu::new_snippet_choices( id, true, choices, selection, buffer, + old_menu.map(|menu| menu.primary_scroll_handle()), snippet_sort_order, ), )); @@ -9772,7 +9956,7 @@ impl Editor { pub fn insert_snippet( &mut self, - insertion_ranges: &[Range], + insertion_ranges: &[Range], snippet: Snippet, window: &mut Window, cx: &mut Context, @@ -9809,14 +9993,13 @@ impl Editor { .flat_map(|tabstop_range| { let mut delta = 0_isize; insertion_ranges.iter().map(move |insertion_range| { - let insertion_start = insertion_range.start as isize + delta; - delta += - snippet.text.len() as isize - insertion_range.len() as isize; - - let start = ((insertion_start + tabstop_range.start) as usize) - .min(snapshot.len()); - let end = ((insertion_start + tabstop_range.end) as usize) - .min(snapshot.len()); + let insertion_start = insertion_range.start + delta; + delta += snippet.text.len() as 
isize + - (insertion_range.end - insertion_range.start) as isize; + + let start = + (insertion_start + tabstop_range.start).min(snapshot.len()); + let end = (insertion_start + tabstop_range.end).min(snapshot.len()); snapshot.anchor_before(start)..snapshot.anchor_after(end) }) }) @@ -10466,7 +10649,9 @@ impl Editor { cx, ); }); - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); }); } @@ -10483,7 +10668,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into_iter() .map(|s| s.range()); @@ -10491,7 +10676,9 @@ impl Editor { this.buffer.update(cx, |buffer, cx| { buffer.autoindent_ranges(selections, cx); }); - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); }); } @@ -10529,7 +10716,7 @@ impl Editor { } else { // If there isn't a line after the range, delete the \n from the line before the // start of the row range - edit_start = edit_start.saturating_sub(1); + edit_start = edit_start.saturating_sub_usize(1); (buffer.len(), rows.start.previous_row()) }; @@ -10780,7 +10967,9 @@ impl Editor { boundaries.into_iter() { let open_offset = start_before.to_offset(&buffer) + start_prefix_len; - let close_offset = end_after.to_offset(&buffer).saturating_sub(end_suffix_len); + let close_offset = end_after + .to_offset(&buffer) + .saturating_sub_usize(end_suffix_len); new_selections.push(open_offset..open_offset); new_selections.push(close_offset..close_offset); } @@ -10810,7 +10999,10 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_ids = HashSet::default(); let snapshot = self.buffer().read(cx).snapshot(cx); - for selection in self.selections.all::(&self.display_snapshot(cx)) { + for selection in self + .selections + .all::(&self.display_snapshot(cx)) + { buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range())) } @@ -10960,7 +11152,7 @@ impl Editor { }]; let focus_handle = bp_prompt.focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); let block_ids = self.insert_blocks(blocks, None, cx); bp_prompt.update(cx, |prompt, _| { @@ -11037,6 +11229,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.breakpoint_store.is_none() { + return; + } + for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { let breakpoint = breakpoint.unwrap_or_else(|| Breakpoint { message: None, @@ -11096,6 +11292,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.breakpoint_store.is_none() { + return; + } + for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_disabled()) else { continue; @@ -11115,6 +11315,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.breakpoint_store.is_none() { + return; + } + for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { let Some(breakpoint) = breakpoint.filter(|breakpoint| breakpoint.is_enabled()) else { continue; @@ -11134,6 +11338,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + if self.breakpoint_store.is_none() { + return; + } + 
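
The snippet-insertion hunk above recomputes each tabstop's absolute range with a running delta: every insertion replaces its range with the snippet text, shifting everything after it by the difference in length, and the result is clamped to the buffer length. A sketch of that arithmetic on plain offsets, with the surrounding editor types left out:

```rust
use std::ops::Range;

fn tabstop_ranges(
    insertion_ranges: &[Range<usize>], // pre-edit ranges that the snippet replaced
    snippet_len: usize,                // length of the snippet text
    tabstop: Range<usize>,             // tabstop range relative to the snippet start
    buffer_len: usize,                 // post-edit buffer length, used to clamp
) -> Vec<Range<usize>> {
    let mut delta = 0isize;
    insertion_ranges
        .iter()
        .map(|insertion_range| {
            let insertion_start = (insertion_range.start as isize + delta) as usize;
            delta += snippet_len as isize
                - (insertion_range.end - insertion_range.start) as isize;
            let start = (insertion_start + tabstop.start).min(buffer_len);
            let end = (insertion_start + tabstop.end).min(buffer_len);
            start..end
        })
        .collect()
}

fn main() {
    // Two empty insertion points at offsets 3 and 10; a 9-byte snippet with a
    // tabstop at 4..4 relative to its start.
    assert_eq!(tabstop_ranges(&[3..3, 10..10], 9, 4..4, 100), vec![7..7, 23..23]);
}
```
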
for (anchor, breakpoint) in self.breakpoints_at_cursors(window, cx) { if let Some(breakpoint) = breakpoint { self.edit_breakpoint_at_anchor( @@ -11209,7 +11417,7 @@ impl Editor { .read(cx) .base_text() .as_rope() - .slice(hunk.diff_base_byte_range.clone()); + .slice(hunk.diff_base_byte_range.start.0..hunk.diff_base_byte_range.end.0); let buffer_snapshot = buffer.snapshot(); let buffer_revert_changes = revert_changes.entry(buffer.remote_id()).or_default(); if let Err(i) = buffer_revert_changes.binary_search_by(|probe| { @@ -11234,6 +11442,168 @@ impl Editor { self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut rand::rng())) } + pub fn rotate_selections_forward( + &mut self, + _: &RotateSelectionsForward, + window: &mut Window, + cx: &mut Context, + ) { + self.rotate_selections(window, cx, false) + } + + pub fn rotate_selections_backward( + &mut self, + _: &RotateSelectionsBackward, + window: &mut Window, + cx: &mut Context, + ) { + self.rotate_selections(window, cx, true) + } + + fn rotate_selections(&mut self, window: &mut Window, cx: &mut Context, reverse: bool) { + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + let display_snapshot = self.display_snapshot(cx); + let selections = self.selections.all::(&display_snapshot); + + if selections.len() < 2 { + return; + } + + let (edits, new_selections) = { + let buffer = self.buffer.read(cx).read(cx); + let has_selections = selections.iter().any(|s| !s.is_empty()); + if has_selections { + let mut selected_texts: Vec = selections + .iter() + .map(|selection| { + buffer + .text_for_range(selection.start..selection.end) + .collect() + }) + .collect(); + + if reverse { + selected_texts.rotate_left(1); + } else { + selected_texts.rotate_right(1); + } + + let mut offset_delta: i64 = 0; + let mut new_selections = Vec::new(); + let edits: Vec<_> = selections + .iter() + .zip(selected_texts.iter()) + .map(|(selection, new_text)| { + let old_len = (selection.end.0 - selection.start.0) as i64; + let new_len = new_text.len() as i64; + let adjusted_start = + MultiBufferOffset((selection.start.0 as i64 + offset_delta) as usize); + let adjusted_end = + MultiBufferOffset((adjusted_start.0 as i64 + new_len) as usize); + + new_selections.push(Selection { + id: selection.id, + start: adjusted_start, + end: adjusted_end, + reversed: selection.reversed, + goal: selection.goal, + }); + + offset_delta += new_len - old_len; + (selection.start..selection.end, new_text.clone()) + }) + .collect(); + (edits, new_selections) + } else { + let mut all_rows: Vec = selections + .iter() + .map(|selection| buffer.offset_to_point(selection.start).row) + .collect(); + all_rows.sort_unstable(); + all_rows.dedup(); + + if all_rows.len() < 2 { + return; + } + + let line_ranges: Vec> = all_rows + .iter() + .map(|&row| { + let start = Point::new(row, 0); + let end = Point::new(row, buffer.line_len(MultiBufferRow(row))); + buffer.point_to_offset(start)..buffer.point_to_offset(end) + }) + .collect(); + + let mut line_texts: Vec = line_ranges + .iter() + .map(|range| buffer.text_for_range(range.clone()).collect()) + .collect(); + + if reverse { + line_texts.rotate_left(1); + } else { + line_texts.rotate_right(1); + } + + let edits = line_ranges + .iter() + .zip(line_texts.iter()) + .map(|(range, new_text)| (range.clone(), new_text.clone())) + .collect(); + + let num_rows = all_rows.len(); + let row_to_index: std::collections::HashMap = all_rows + .iter() + .enumerate() + .map(|(i, &row)| (row, i)) + .collect(); + + // Compute new line start offsets 
after rotation (handles CRLF) + let newline_len = line_ranges[1].start.0 - line_ranges[0].end.0; + let first_line_start = line_ranges[0].start.0; + let mut new_line_starts: Vec = vec![first_line_start]; + for text in line_texts.iter().take(num_rows - 1) { + let prev_start = *new_line_starts.last().unwrap(); + new_line_starts.push(prev_start + text.len() + newline_len); + } + + let new_selections = selections + .iter() + .map(|selection| { + let point = buffer.offset_to_point(selection.start); + let old_index = row_to_index[&point.row]; + let new_index = if reverse { + (old_index + num_rows - 1) % num_rows + } else { + (old_index + 1) % num_rows + }; + let new_offset = + MultiBufferOffset(new_line_starts[new_index] + point.column as usize); + Selection { + id: selection.id, + start: new_offset, + end: new_offset, + reversed: selection.reversed, + goal: selection.goal, + } + }) + .collect(); + + (edits, new_selections) + } + }; + + self.transact(window, cx, |this, window, cx| { + this.buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + this.change_selections(Default::default(), window, cx, |s| { + s.select(new_selections); + }); + }); + } + fn manipulate_lines( &mut self, window: &mut Window, @@ -11649,7 +12019,7 @@ impl Editor { let mut new_selections = Vec::new(); let mut edits = Vec::new(); - let mut selection_adjustment = 0i32; + let mut selection_adjustment = 0isize; for selection in self.selections.all_adjusted(&self.display_snapshot(cx)) { let selection_is_empty = selection.is_empty(); @@ -11665,18 +12035,20 @@ impl Editor { }; let text = buffer.text_for_range(start..end).collect::(); - let old_length = text.len() as i32; + let old_length = text.len() as isize; let text = callback(&text); new_selections.push(Selection { - start: (start as i32 - selection_adjustment) as usize, - end: ((start + text.len()) as i32 - selection_adjustment) as usize, + start: MultiBufferOffset((start.0 as isize - selection_adjustment) as usize), + end: MultiBufferOffset( + ((start.0 + text.len()) as isize - selection_adjustment) as usize, + ), goal: SelectionGoal::None, id: selection.id, reversed: selection.reversed, }); - selection_adjustment += old_length - text.len() as i32; + selection_adjustment += old_length - text.len() as isize; edits.push((start..end, text)); } @@ -12093,7 +12465,7 @@ impl Editor { let text_layout_details = &self.text_layout_details(window); self.transact(window, cx, |this, window, cx| { let edits = this.change_selections(Default::default(), window, cx, |s| { - let mut edits: Vec<(Range, String)> = Default::default(); + let mut edits: Vec<(Range, String)> = Default::default(); s.move_with(|display_map, selection| { if !selection.is_empty() { return; @@ -12104,10 +12476,10 @@ impl Editor { if head.column() == display_map.line_len(head.row()) { transpose_offset = display_map .buffer_snapshot() - .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); + .clip_offset(transpose_offset.saturating_sub_usize(1), Bias::Left); } - if transpose_offset == 0 { + if transpose_offset == MultiBufferOffset(0) { return; } @@ -12122,11 +12494,11 @@ impl Editor { let transpose_start = display_map .buffer_snapshot() - .clip_offset(transpose_offset.saturating_sub(1), Bias::Left); + .clip_offset(transpose_offset.saturating_sub_usize(1), Bias::Left); if edits.last().is_none_or(|e| e.0.end <= transpose_start) { let transpose_end = display_map .buffer_snapshot() - .clip_offset(transpose_offset + 1, Bias::Right); + .clip_offset(transpose_offset + 1usize, Bias::Right); if let 
Some(ch) = display_map .buffer_snapshot() .chars_at(transpose_start) @@ -12141,7 +12513,9 @@ impl Editor { }); this.buffer .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| { s.select(selections); }); @@ -12541,6 +12915,7 @@ impl Editor { { let max_point = buffer.max_point(); let mut is_first = true; + let mut prev_selection_was_entire_line = false; for selection in &mut selections { let is_entire_line = (selection.is_empty() && cut_no_selection_line) || self.selections.line_mode(); @@ -12555,21 +12930,24 @@ impl Editor { } if is_first { is_first = false; - } else { + } else if !prev_selection_was_entire_line { text += "\n"; } + prev_selection_was_entire_line = is_entire_line; let mut len = 0; for chunk in buffer.text_for_range(selection.start..selection.end) { text.push_str(chunk); len += chunk.len(); } - clipboard_selections.push(ClipboardSelection { + + clipboard_selections.push(ClipboardSelection::for_buffer( len, is_entire_line, - first_line_indent: buffer - .indent_size_for_line(MultiBufferRow(selection.start.row)) - .len, - }); + selection.range(), + &buffer, + self.project.as_ref(), + cx, + )); } } @@ -12640,6 +13018,7 @@ impl Editor { { let max_point = buffer.max_point(); let mut is_first = true; + let mut prev_selection_was_entire_line = false; for selection in &selections { let mut start = selection.start; let mut end = selection.end; @@ -12698,9 +13077,10 @@ impl Editor { for trimmed_range in trimmed_selections { if is_first { is_first = false; - } else { + } else if !prev_selection_was_entire_line { text += "\n"; } + prev_selection_was_entire_line = is_entire_line; let mut len = 0; for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { text.push_str(chunk); @@ -12710,13 +13090,14 @@ impl Editor { text.push('\n'); len += 1; } - clipboard_selections.push(ClipboardSelection { + clipboard_selections.push(ClipboardSelection::for_buffer( len, is_entire_line, - first_line_indent: buffer - .indent_size_for_line(MultiBufferRow(trimmed_range.start.row)) - .len, - }); + trimmed_range, + &buffer, + self.project.as_ref(), + cx, + )); } } } @@ -12744,8 +13125,11 @@ impl Editor { self.transact(window, cx, |this, window, cx| { let had_active_edit_prediction = this.has_active_edit_prediction(); let display_map = this.display_snapshot(cx); - let old_selections = this.selections.all::(&display_map); - let cursor_offset = this.selections.last::(&display_map).head(); + let old_selections = this.selections.all::(&display_map); + let cursor_offset = this + .selections + .last::(&display_map) + .head(); if let Some(mut clipboard_selections) = clipboard_selections { let all_selections_were_entire_line = @@ -12774,7 +13158,11 @@ impl Editor { let end_offset = start_offset + clipboard_selection.len; to_insert = &clipboard_text[start_offset..end_offset]; entire_line = clipboard_selection.is_entire_line; - start_offset = end_offset + 1; + start_offset = if entire_line { + end_offset + } else { + end_offset + 1 + }; original_indent_column = Some(clipboard_selection.first_line_indent); } else { to_insert = &*clipboard_text; @@ -12826,7 +13214,9 @@ impl Editor { ); }); - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| 
s.select(selections)); } else { let url = url::Url::parse(&clipboard_text).ok(); @@ -12895,7 +13285,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); if selections.is_empty() { log::warn!("There should always be at least one selection in Zed. This is a bug."); @@ -14148,7 +14540,7 @@ impl Editor { } self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![0..0]); + s.select_ranges(vec![Anchor::min()..Anchor::min()]); }); } @@ -14237,7 +14629,9 @@ impl Editor { pub fn select_to_end(&mut self, _: &SelectToEnd, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); - let mut selection = self.selections.first::(&self.display_snapshot(cx)); + let mut selection = self + .selections + .first::(&self.display_snapshot(cx)); selection.set_head(buffer.len(), SelectionGoal::None); self.change_selections(Default::default(), window, cx, |s| { s.select(vec![selection]); @@ -14246,9 +14640,8 @@ impl Editor { pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let end = self.buffer.read(cx).read(cx).len(); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(vec![0..end]); + s.select_ranges(vec![Anchor::min()..Anchor::max()]); }); } @@ -14506,9 +14899,55 @@ impl Editor { } } + pub fn insert_snippet_at_selections( + &mut self, + action: &InsertSnippet, + window: &mut Window, + cx: &mut Context, + ) { + self.try_insert_snippet_at_selections(action, window, cx) + .log_err(); + } + + fn try_insert_snippet_at_selections( + &mut self, + action: &InsertSnippet, + window: &mut Window, + cx: &mut Context, + ) -> Result<()> { + let insertion_ranges = self + .selections + .all::(&self.display_snapshot(cx)) + .into_iter() + .map(|selection| selection.range()) + .collect_vec(); + + let snippet = if let Some(snippet_body) = &action.snippet { + if action.language.is_none() && action.name.is_none() { + Snippet::parse(snippet_body)? + } else { + bail!("`snippet` is mutually exclusive with `language` and `name`") + } + } else if let Some(name) = &action.name { + let project = self.project().context("no project")?; + let snippet_store = project.read(cx).snippets().read(cx); + let snippet = snippet_store + .snippets_for(action.language.clone(), cx) + .into_iter() + .find(|snippet| snippet.name == *name) + .context("snippet not found")?; + Snippet::parse(&snippet.body)? 
+ } else { + // todo(andrew): open modal to select snippet + bail!("`name` or `snippet` is required") + }; + + self.insert_snippet(&insertion_ranges, snippet, window, cx) + } + fn select_match_ranges( &mut self, - range: Range, + range: Range, reversed: bool, replace_newest: bool, auto_scroll: Option, @@ -14547,7 +14986,7 @@ impl Editor { cx: &mut Context, ) -> Result<()> { let buffer = display_map.buffer_snapshot(); - let mut selections = self.selections.all::(&display_map); + let mut selections = self.selections.all::(&display_map); if let Some(mut select_next_state) = self.select_next_state.take() { let query = &select_next_state.query; if !select_next_state.done { @@ -14557,14 +14996,15 @@ impl Editor { let bytes_after_last_selection = buffer.bytes_in_range(last_selection.end..buffer.len()); - let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); + let bytes_before_first_selection = + buffer.bytes_in_range(MultiBufferOffset(0)..first_selection.start); let query_matches = query .stream_find_iter(bytes_after_last_selection) .map(|result| (last_selection.end, result)) .chain( query .stream_find_iter(bytes_before_first_selection) - .map(|result| (0, result)), + .map(|result| (MultiBufferOffset(0), result)), ); for (start_offset, query_match) in query_matches { @@ -14622,7 +15062,7 @@ impl Editor { } if let Some(next_selection) = selections_iter.peek() { - if next_selection.range().len() == selection.range().len() { + if next_selection.len() == selection.len() { let next_selected_text = buffer .text_for_range(next_selection.range()) .collect::(); @@ -14710,18 +15150,21 @@ impl Editor { let mut new_selections = Vec::new(); - let reversed = self.selections.oldest::(&display_map).reversed; + let reversed = self + .selections + .oldest::(&display_map) + .reversed; let buffer = display_map.buffer_snapshot(); let query_matches = select_next_state .query - .stream_find_iter(buffer.bytes_in_range(0..buffer.len())); + .stream_find_iter(buffer.bytes_in_range(MultiBufferOffset(0)..buffer.len())); for query_match in query_matches.into_iter() { let query_match = query_match.context("query match for select all action")?; // can only fail due to I/O let offset_range = if reversed { - query_match.end()..query_match.start() + MultiBufferOffset(query_match.end())..MultiBufferOffset(query_match.start()) } else { - query_match.start()..query_match.end() + MultiBufferOffset(query_match.start())..MultiBufferOffset(query_match.end()) }; if !select_next_state.wordwise @@ -14774,7 +15217,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); - let mut selections = self.selections.all::(&display_map); + let mut selections = self.selections.all::(&display_map); if let Some(mut select_prev_state) = self.select_prev_state.take() { let query = &select_prev_state.query; if !select_prev_state.done { @@ -14783,7 +15226,7 @@ impl Editor { let mut next_selected_range = None; // When we're iterating matches backwards, the oldest match will actually be the furthest one in the buffer. 
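
Two related fixes appear in the cut/copy and paste hunks further above: entire-line copies already end in `'\n'`, so the join no longer inserts a separator after them, and paste only skips a separator byte when the previous piece was not an entire line. A round-trip sketch of that bookkeeping, with a toy `(len, is_entire_line)` pair standing in for `ClipboardSelection`:

```rust
fn join_copied_selections(pieces: &[(&str, bool)]) -> (String, Vec<(usize, bool)>) {
    let mut text = String::new();
    let mut selections = Vec::new();
    let mut is_first = true;
    let mut prev_was_entire_line = false;
    for &(piece, is_entire_line) in pieces {
        if is_first {
            is_first = false;
        } else if !prev_was_entire_line {
            // Entire-line pieces already end in '\n'; only other pieces need a separator.
            text.push('\n');
        }
        prev_was_entire_line = is_entire_line;
        text.push_str(piece);
        selections.push((piece.len(), is_entire_line));
    }
    (text, selections)
}

fn split_clipboard<'a>(text: &'a str, selections: &[(usize, bool)]) -> Vec<&'a str> {
    let mut pieces = Vec::new();
    let mut start_offset = 0;
    for &(len, is_entire_line) in selections {
        let end_offset = start_offset + len;
        pieces.push(&text[start_offset..end_offset]);
        // Skip the '\n' separator only if one was written after this piece.
        start_offset = if is_entire_line { end_offset } else { end_offset + 1 };
    }
    pieces
}

fn main() {
    let pieces = [("fn a() {}\n", true), ("fn b() {}\n", true), ("word", false)];
    let (text, selections) = join_copied_selections(&pieces);
    assert_eq!(text, "fn a() {}\nfn b() {}\nword");
    assert_eq!(
        split_clipboard(&text, &selections),
        vec!["fn a() {}\n", "fn b() {}\n", "word"]
    );
}
```

The `select_next` hunk immediately above also shows how wrap-around matches are mapped back into whole-buffer offsets. The real code streams multibuffer bytes through an Aho-Corasick automaton; plain `str::match_indices` is enough to show the offset bookkeeping, simplified here to a single selection:

```rust
use std::ops::Range;

fn next_occurrence(text: &str, query: &str, last_selection_end: usize) -> Option<Range<usize>> {
    let after = &text[last_selection_end..];
    let before = &text[..last_selection_end];
    after
        .match_indices(query)
        .map(|(start, m)| {
            // Offsets in `after` are relative to where that slice begins.
            (last_selection_end + start)..(last_selection_end + start + m.len())
        })
        .chain(
            // Wrap around to the part of the buffer before the selection.
            before.match_indices(query).map(|(start, m)| start..start + m.len()),
        )
        .next()
}

fn main() {
    let text = "alpha beta alpha gamma";
    // Cursor just past the first "alpha": the next match is the second one...
    assert_eq!(next_occurrence(text, "alpha", 5), Some(11..16));
    // ...and past that, the search wraps back to the first occurrence.
    assert_eq!(next_occurrence(text, "alpha", 16), Some(0..5));
}
```
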
let bytes_before_last_selection = - buffer.reversed_bytes_in_range(0..last_selection.start); + buffer.reversed_bytes_in_range(MultiBufferOffset(0)..last_selection.start); let bytes_after_first_selection = buffer.reversed_bytes_in_range(first_selection.end..buffer.len()); let query_matches = query @@ -14841,7 +15284,7 @@ impl Editor { } if let Some(next_selection) = selections_iter.peek() { - if next_selection.range().len() == selection.range().len() { + if next_selection.len() == selection.len() { let next_selected_text = buffer .text_for_range(next_selection.range()) .collect::(); @@ -14912,10 +15355,9 @@ impl Editor { I: IntoIterator, P: AsRef<[u8]>, { - let case_sensitive = self.select_next_is_case_sensitive.map_or_else( - || EditorSettings::get_global(cx).search.case_sensitive, - |value| value, - ); + let case_sensitive = self + .select_next_is_case_sensitive + .unwrap_or_else(|| EditorSettings::get_global(cx).search.case_sensitive); let mut builder = AhoCorasickBuilder::new(); builder.ascii_case_insensitive(!case_sensitive); @@ -15287,13 +15729,13 @@ impl Editor { let buffer = self.buffer.read(cx).snapshot(cx); let old_selections = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into_boxed_slice(); fn update_selection( - selection: &Selection, + selection: &Selection, buffer_snap: &MultiBufferSnapshot, - ) -> Option> { + ) -> Option> { let cursor = selection.head(); let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?; for symbol in symbols.iter().rev() { @@ -15345,7 +15787,7 @@ impl Editor { }; let old_selections: Box<[_]> = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into(); if old_selections.is_empty() { return; @@ -15504,7 +15946,7 @@ impl Editor { let buffer = self.buffer.read(cx).snapshot(cx); let selections = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into_iter() // subtracting the offset requires sorting .sorted_by_key(|i| i.start); @@ -15556,7 +15998,7 @@ impl Editor { let mut selections = vec![]; for (id, parent, text) in full_edits { let start = parent.start - offset; - offset += parent.len() - text.len(); + offset += (parent.end - parent.start) - text.len(); selections.push(Selection { id, start, @@ -15578,7 +16020,7 @@ impl Editor { ) { let old_selections: Box<[_]> = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into(); if old_selections.is_empty() { return; @@ -15594,8 +16036,18 @@ impl Editor { .map(|selection| { let old_range = selection.start..selection.end; - if let Some(node) = buffer.syntax_next_sibling(old_range) { - let new_range = node.byte_range(); + let old_range = + old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); + let excerpt = buffer.excerpt_containing(old_range.clone()); + + if let Some(mut excerpt) = excerpt + && let Some(node) = excerpt + .buffer() + .syntax_next_sibling(excerpt.map_range_to_buffer(old_range)) + { + let new_range = excerpt.map_range_from_buffer( + BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), + ); selected_sibling = true; Selection { id: selection.id, @@ -15630,7 +16082,7 @@ impl Editor { ) { let old_selections: Box<[_]> = self .selections - .all::(&self.display_snapshot(cx)) + .all::(&self.display_snapshot(cx)) .into(); if old_selections.is_empty() { return; @@ -15645,9 +16097,18 @@ impl Editor { .iter() .map(|selection| { let old_range = selection.start..selection.end; + let 
old_range = + old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); + let excerpt = buffer.excerpt_containing(old_range.clone()); - if let Some(node) = buffer.syntax_prev_sibling(old_range) { - let new_range = node.byte_range(); + if let Some(mut excerpt) = excerpt + && let Some(node) = excerpt + .buffer() + .syntax_prev_sibling(excerpt.map_range_to_buffer(old_range)) + { + let new_range = excerpt.map_range_from_buffer( + BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), + ); selected_sibling = true; Selection { id: selection.id, @@ -15796,7 +16257,7 @@ impl Editor { fn fetch_runnable_ranges( snapshot: &DisplaySnapshot, range: Range, - ) -> Vec { + ) -> Vec<(Range, language::RunnableRange)> { snapshot.buffer_snapshot().runnable_ranges(range).collect() } @@ -15804,12 +16265,12 @@ impl Editor { project: Entity, snapshot: DisplaySnapshot, prefer_lsp: bool, - runnable_ranges: Vec, + runnable_ranges: Vec<(Range, language::RunnableRange)>, cx: AsyncWindowContext, ) -> Task> { cx.spawn(async move |cx| { let mut runnable_rows = Vec::with_capacity(runnable_ranges.len()); - for mut runnable in runnable_ranges { + for (run_range, mut runnable) in runnable_ranges { let Some(tasks) = cx .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) .ok() @@ -15827,10 +16288,7 @@ impl Editor { continue; } - let point = runnable - .run_range - .start - .to_point(&snapshot.buffer_snapshot()); + let point = run_range.start.to_point(&snapshot.buffer_snapshot()); let Some(row) = snapshot .buffer_snapshot() .buffer_line_for_row(MultiBufferRow(point.row)) @@ -15845,9 +16303,7 @@ impl Editor { (runnable.buffer_id, row), RunnableTasks { templates: tasks, - offset: snapshot - .buffer_snapshot() - .anchor_before(runnable.run_range.start), + offset: snapshot.buffer_snapshot().anchor_before(run_range.start), context_range, column: point.column, extra_variables: runnable.extra_captures, @@ -15934,7 +16390,7 @@ impl Editor { let mut best_destination = None; for (open, close) in enclosing_bracket_ranges { let close = close.to_inclusive(); - let length = close.end() - open.start; + let length = *close.end() - open.start; let inside = selection.start >= open.end && selection.end <= *close.start(); let in_bracket_range = open.to_inclusive().contains(&selection.head()) || close.contains(&selection.head()); @@ -16193,7 +16649,9 @@ impl Editor { cx: &mut Context, ) { let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self.selections.newest::(&self.display_snapshot(cx)); + let selection = self + .selections + .newest::(&self.display_snapshot(cx)); let mut active_group_id = None; if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics @@ -16204,8 +16662,8 @@ impl Editor { fn filtered<'a>( severity: GoToDiagnosticSeverityFilter, - diagnostics: impl Iterator>, - ) -> impl Iterator> { + diagnostics: impl Iterator>, + ) -> impl Iterator> { diagnostics .filter(move |entry| severity.matches(entry.diagnostic.severity)) .filter(|entry| entry.range.start != entry.range.end) @@ -16215,7 +16673,7 @@ impl Editor { let before = filtered( severity, buffer - .diagnostics_in_range(0..selection.start) + .diagnostics_in_range(MultiBufferOffset(0)..selection.start) .filter(|entry| entry.range.start <= selection.start), ); let after = filtered( @@ -16225,7 +16683,7 @@ impl Editor { .filter(|entry| entry.range.start >= selection.start), ); - let mut found: Option> = None; + let mut found: Option> = None; if direction == Direction::Prev { 'outer: for 
prev_diagnostics in [before.collect::>(), after.collect::>()] { @@ -16532,7 +16990,7 @@ impl Editor { GoToDefinitionFallback::None => Ok(Navigated::No), GoToDefinitionFallback::FindAllReferences => { match editor.update_in(cx, |editor, window, cx| { - editor.find_all_references(&FindAllReferences, window, cx) + editor.find_all_references(&FindAllReferences::default(), window, cx) })? { Some(references) => references.await, None => Ok(Navigated::No), @@ -16617,7 +17075,7 @@ impl Editor { }; let head = self .selections - .newest::(&self.display_snapshot(cx)) + .newest::(&self.display_snapshot(cx)) .head(); let buffer = self.buffer.read(cx); let Some((buffer, head)) = buffer.text_anchor_for_position(head, cx) else { @@ -16786,10 +17244,6 @@ impl Editor { } if num_locations > 1 { - let Some(workspace) = workspace else { - return Ok(Navigated::No); - }; - let tab_kind = match kind { Some(GotoDefinitionKind::Implementation) => "Implementations", Some(GotoDefinitionKind::Symbol) | None => "Definitions", @@ -16819,13 +17273,20 @@ impl Editor { }) .context("buffer title")?; + let Some(workspace) = workspace else { + return Ok(Navigated::No); + }; + let opened = workspace .update_in(cx, |workspace, window, cx| { + let allow_preview = PreviewTabsSettings::get_global(cx) + .enable_preview_multibuffer_from_code_navigation; Self::open_locations_in_multibuffer( workspace, locations, title, split, + allow_preview, MultibufferSelectionMode::First, window, cx, @@ -16838,14 +17299,23 @@ impl Editor { // If there is one url or file, open it directly match first_url_or_file { Some(Either::Left(url)) => { - cx.update(|_, cx| cx.open_url(&url))?; + cx.update(|window, cx| { + if parse_zed_link(&url, cx).is_some() { + window + .dispatch_action(Box::new(zed_actions::OpenZedUrl { url }), cx); + } else { + cx.open_url(&url); + } + })?; Ok(Navigated::Yes) } Some(Either::Right(path)) => { + // TODO(andrew): respect preview tab settings + // `enable_keep_preview_on_code_navigation` and + // `enable_preview_file_from_code_navigation` let Some(workspace) = workspace else { return Ok(Navigated::No); }; - workspace .update_in(cx, |workspace, window, cx| { workspace.open_resolved_path(path, window, cx) @@ -16856,16 +17326,12 @@ impl Editor { None => Ok(Navigated::No), } } else { - let Some(workspace) = workspace else { - return Ok(Navigated::No); - }; - let (target_buffer, target_ranges) = locations.into_iter().next().unwrap(); let target_range = target_ranges.first().unwrap().clone(); editor.update_in(cx, |editor, window, cx| { let range = target_range.to_point(target_buffer.read(cx)); - let range = editor.range_for_match(&range, false); + let range = editor.range_for_match(&range); let range = collapse_multiline_range(range); if !split @@ -16873,6 +17339,9 @@ impl Editor { { editor.go_to_singleton_buffer_range(range, window, cx); } else { + let Some(workspace) = workspace else { + return Navigated::No; + }; let pane = workspace.read(cx).active_pane().clone(); window.defer(cx, move |window, cx| { let target_editor: Entity = @@ -16883,11 +17352,19 @@ impl Editor { workspace.active_pane().clone() }; + let preview_tabs_settings = PreviewTabsSettings::get_global(cx); + let keep_old_preview = preview_tabs_settings + .enable_keep_preview_on_code_navigation; + let allow_new_preview = preview_tabs_settings + .enable_preview_file_from_code_navigation; + workspace.open_project_item( pane, target_buffer.clone(), true, true, + keep_old_preview, + allow_new_preview, window, cx, ) @@ -16998,9 +17475,6 @@ impl Editor { let multi_buffer = 
editor.read_with(cx, |editor, _| editor.buffer().clone())?; - let multi_buffer_snapshot = - multi_buffer.read_with(cx, |multi_buffer, cx| multi_buffer.snapshot(cx))?; - let (locations, current_location_index) = multi_buffer.update(cx, |multi_buffer, cx| { let mut locations = locations @@ -17020,6 +17494,7 @@ impl Editor { }) .collect::>(); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); // There is an O(n) implementation, but given this list will be // small (usually <100 items), the extra O(log(n)) factor isn't // worth the (surprisingly large amount of) extra complexity. @@ -17078,18 +17553,21 @@ impl Editor { pub fn find_all_references( &mut self, - _: &FindAllReferences, + action: &FindAllReferences, window: &mut Window, cx: &mut Context, ) -> Option>> { - let selection = self.selections.newest::(&self.display_snapshot(cx)); + let always_open_multibuffer = action.always_open_multibuffer; + let selection = self.selections.newest_anchor(); let multi_buffer = self.buffer.read(cx); - let head = selection.head(); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let selection_offset = selection.map(|anchor| anchor.to_offset(&multi_buffer_snapshot)); + let selection_point = selection.map(|anchor| anchor.to_point(&multi_buffer_snapshot)); + let head = selection_offset.head(); + let head_anchor = multi_buffer_snapshot.anchor_at( head, - if head < selection.tail() { + if head < selection_offset.tail() { Bias::Right } else { Bias::Left @@ -17135,6 +17613,15 @@ impl Editor { let buffer = location.buffer.read(cx); (location.buffer, location.range.to_point(buffer)) }) + // if special-casing the single-match case, remove ranges + // that intersect current selection + .filter(|(location_buffer, location)| { + if always_open_multibuffer || &buffer != location_buffer { + return true; + } + + !location.contains_inclusive(&selection_point.range()) + }) .into_group_map() })?; if locations.is_empty() { @@ -17144,31 +17631,88 @@ impl Editor { ranges.sort_by_key(|range| (range.start, Reverse(range.end))); ranges.dedup(); } + let mut num_locations = 0; + for ranges in locations.values_mut() { + ranges.sort_by_key(|range| (range.start, Reverse(range.end))); + ranges.dedup(); + num_locations += ranges.len(); + } - workspace.update_in(cx, |workspace, window, cx| { - let target = locations - .iter() - .flat_map(|(k, v)| iter::repeat(k.clone()).zip(v)) - .map(|(buffer, location)| { - buffer - .read(cx) - .text_for_range(location.clone()) - .collect::() - }) - .filter(|text| !text.contains('\n')) - .unique() - .take(3) - .join(", "); + if num_locations == 1 && !always_open_multibuffer { + let (target_buffer, target_ranges) = locations.into_iter().next().unwrap(); + let target_range = target_ranges.first().unwrap().clone(); + + return editor.update_in(cx, |editor, window, cx| { + let range = target_range.to_point(target_buffer.read(cx)); + let range = editor.range_for_match(&range); + let range = range.start..range.start; + + if Some(&target_buffer) == editor.buffer.read(cx).as_singleton().as_ref() { + editor.go_to_singleton_buffer_range(range, window, cx); + } else { + let pane = workspace.read(cx).active_pane().clone(); + window.defer(cx, move |window, cx| { + let target_editor: Entity = + workspace.update(cx, |workspace, cx| { + let pane = workspace.active_pane().clone(); + + let preview_tabs_settings = PreviewTabsSettings::get_global(cx); + let keep_old_preview = preview_tabs_settings + .enable_keep_preview_on_code_navigation; + let allow_new_preview = preview_tabs_settings + 
.enable_preview_file_from_code_navigation; + + workspace.open_project_item( + pane, + target_buffer.clone(), + true, + true, + keep_old_preview, + allow_new_preview, + window, + cx, + ) + }); + target_editor.update(cx, |target_editor, cx| { + // When selecting a definition in a different buffer, disable the nav history + // to avoid creating a history entry at the previous cursor location. + pane.update(cx, |pane, _| pane.disable_history()); + target_editor.go_to_singleton_buffer_range(range, window, cx); + pane.update(cx, |pane, _| pane.enable_history()); + }); + }); + } + Navigated::No + }); + } + + workspace.update_in(cx, |workspace, window, cx| { + let target = locations + .iter() + .flat_map(|(k, v)| iter::repeat(k.clone()).zip(v)) + .map(|(buffer, location)| { + buffer + .read(cx) + .text_for_range(location.clone()) + .collect::() + }) + .filter(|text| !text.contains('\n')) + .unique() + .take(3) + .join(", "); let title = if target.is_empty() { "References".to_owned() } else { format!("References to {target}") }; + let allow_preview = PreviewTabsSettings::get_global(cx) + .enable_preview_multibuffer_from_code_navigation; Self::open_locations_in_multibuffer( workspace, locations, title, false, + allow_preview, MultibufferSelectionMode::First, window, cx, @@ -17178,12 +17722,13 @@ impl Editor { })) } - /// Opens a multibuffer with the given project locations in it + /// Opens a multibuffer with the given project locations in it. pub fn open_locations_in_multibuffer( workspace: &mut Workspace, locations: std::collections::HashMap, Vec>>, title: String, split: bool, + allow_preview: bool, multibuffer_selection_mode: MultibufferSelectionMode, window: &mut Window, cx: &mut Context, @@ -17231,6 +17776,7 @@ impl Editor { .is_some_and(|it| *it == key) }) }); + let was_existing = existing.is_some(); let editor = existing.unwrap_or_else(|| { cx.new(|cx| { let mut editor = Editor::for_multibuffer( @@ -17258,7 +17804,7 @@ impl Editor { } editor.highlight_background::( &ranges, - |theme| theme.colors().editor_highlighted_line_background, + |_, theme| theme.colors().editor_highlighted_line_background, cx, ); } @@ -17271,29 +17817,23 @@ impl Editor { }); let item = Box::new(editor); - let item_id = item.item_id(); - - if split { - let pane = workspace.adjacent_pane(window, cx); - workspace.add_item(pane, item, None, true, true, window, cx); - } else if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation { - let (preview_item_id, preview_item_idx) = - workspace.active_pane().read_with(cx, |pane, _| { - (pane.preview_item_id(), pane.preview_item_idx()) - }); - workspace.add_item_to_active_pane(item, preview_item_idx, true, window, cx); + let pane = if split { + workspace.adjacent_pane(window, cx) + } else { + workspace.active_pane().clone() + }; + let activate_pane = split; - if let Some(preview_item_id) = preview_item_id { - workspace.active_pane().update(cx, |pane, cx| { - pane.remove_item(preview_item_id, false, false, window, cx); - }); + let mut destination_index = None; + pane.update(cx, |pane, cx| { + if allow_preview && !was_existing { + destination_index = pane.replace_preview_item_id(item.item_id(), window, cx); } - } else { - workspace.add_item_to_active_pane(item, None, true, window, cx); - } - workspace.active_pane().update(cx, |pane, cx| { - pane.set_preview_item_id(Some(item_id), cx); + if was_existing && !allow_preview { + pane.unpreview_item_if_preview(item.item_id()); + } + pane.add_item(item, activate_pane, true, destination_index, window, cx); }); } @@ -17356,7 
+17896,8 @@ impl Editor { this.take_rename(false, window, cx); let buffer = this.buffer.read(cx).read(cx); let cursor_offset = selection.head().to_offset(&buffer); - let rename_start = cursor_offset.saturating_sub(cursor_offset_in_rename_range); + let rename_start = + cursor_offset.saturating_sub_usize(cursor_offset_in_rename_range); let rename_end = rename_start + rename_buffer_range.len(); let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end); let mut old_highlight_id = None; @@ -17378,8 +17919,16 @@ impl Editor { let rename_editor = cx.new(|cx| { let mut editor = Editor::single_line(window, cx); editor.buffer.update(cx, |buffer, cx| { - buffer.edit([(0..0, old_name.clone())], None, cx) + buffer.edit( + [(MultiBufferOffset(0)..MultiBufferOffset(0), old_name.clone())], + None, + cx, + ) }); + let cursor_offset_in_rename_range = + MultiBufferOffset(cursor_offset_in_rename_range); + let cursor_offset_in_rename_range_end = + MultiBufferOffset(cursor_offset_in_rename_range_end); let rename_selection_range = match cursor_offset_in_rename_range .cmp(&cursor_offset_in_rename_range_end) { @@ -17394,7 +17943,7 @@ impl Editor { cursor_offset_in_rename_range_end..cursor_offset_in_rename_range } }; - if rename_selection_range.end > old_name.len() { + if rename_selection_range.end.0 > old_name.len() { editor.select_all(&SelectAll, window, cx); } else { editor.change_selections(Default::default(), window, cx, |s| { @@ -17430,7 +17979,7 @@ impl Editor { cx, ); let rename_focus_handle = rename_editor.focus_handle(cx); - window.focus(&rename_focus_handle); + window.focus(&rename_focus_handle, cx); let block_id = this.insert_blocks( [BlockProperties { style: BlockStyle::Flex, @@ -17544,7 +18093,7 @@ impl Editor { ) -> Option { let rename = self.pending_rename.take()?; if rename.editor.focus_handle(cx).is_focused(window) { - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); } self.remove_blocks( @@ -17559,7 +18108,7 @@ impl Editor { let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| { editor .selections - .newest::(&editor.display_snapshot(cx)) + .newest::(&editor.display_snapshot(cx)) .head() }); @@ -17858,7 +18407,7 @@ impl Editor { let primary_range_start = active_diagnostics.active_range.start.to_offset(&buffer); let primary_range_end = active_diagnostics.active_range.end.to_offset(&buffer); let is_valid = buffer - .diagnostics_in_range::(primary_range_start..primary_range_end) + .diagnostics_in_range::(primary_range_start..primary_range_end) .any(|entry| { entry.diagnostic.is_primary && !entry.range.is_empty() @@ -17890,7 +18439,7 @@ impl Editor { fn activate_diagnostics( &mut self, buffer_id: BufferId, - diagnostic: DiagnosticEntryRef<'_, usize>, + diagnostic: DiagnosticEntryRef<'_, MultiBufferOffset>, window: &mut Window, cx: &mut Context, ) { @@ -17908,8 +18457,18 @@ impl Editor { .diagnostic_group(buffer_id, diagnostic.diagnostic.group_id) .collect::>(); - let blocks = - renderer.render_group(diagnostic_group, buffer_id, snapshot, cx.weak_entity(), cx); + let language_registry = self + .project() + .map(|project| project.read(cx).languages().clone()); + + let blocks = renderer.render_group( + diagnostic_group, + buffer_id, + snapshot, + cx.weak_entity(), + language_registry, + cx, + ); let blocks = self.display_map.update(cx, |display_map, cx| { display_map.insert_blocks(blocks, cx).into_iter().collect() @@ -18072,7 +18631,9 @@ impl Editor { let new_inline_diagnostics = cx .background_spawn(async move { let mut inline_diagnostics = 
Vec::<(Anchor, InlineDiagnostic)>::new(); - for diagnostic_entry in snapshot.diagnostics_in_range(0..snapshot.len()) { + for diagnostic_entry in + snapshot.diagnostics_in_range(MultiBufferOffset(0)..snapshot.len()) + { let message = diagnostic_entry .diagnostic .message @@ -18128,54 +18689,101 @@ impl Editor { return None; } let project = self.project()?.downgrade(); - let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); - let mut buffers = self.buffer.read(cx).all_buffers(); - buffers.retain(|buffer| { - let buffer_id_to_retain = buffer.read(cx).remote_id(); - buffer_id.is_none_or(|buffer_id| buffer_id == buffer_id_to_retain) - && self.registered_buffers.contains_key(&buffer_id_to_retain) - }); - if buffers.is_empty() { + + let mut edited_buffer_ids = HashSet::default(); + let mut edited_worktree_ids = HashSet::default(); + let edited_buffers = match buffer_id { + Some(buffer_id) => { + let buffer = self.buffer().read(cx).buffer(buffer_id)?; + let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx))?; + edited_buffer_ids.insert(buffer.read(cx).remote_id()); + edited_worktree_ids.insert(worktree_id); + vec![buffer] + } + None => self + .buffer() + .read(cx) + .all_buffers() + .into_iter() + .filter(|buffer| { + let buffer = buffer.read(cx); + match buffer.file().map(|f| f.worktree_id(cx)) { + Some(worktree_id) => { + edited_buffer_ids.insert(buffer.remote_id()); + edited_worktree_ids.insert(worktree_id); + true + } + None => false, + } + }) + .collect::>(), + }; + + if edited_buffers.is_empty() { self.pull_diagnostics_task = Task::ready(()); + self.pull_diagnostics_background_task = Task::ready(()); return None; } - self.pull_diagnostics_task = cx.spawn_in(window, async move |editor, cx| { - cx.background_executor().timer(debounce).await; + let mut already_used_buffers = HashSet::default(); + let related_open_buffers = self + .workspace + .as_ref() + .and_then(|(workspace, _)| workspace.upgrade()) + .into_iter() + .flat_map(|workspace| workspace.read(cx).panes()) + .flat_map(|pane| pane.read(cx).items_of_type::()) + .filter(|editor| editor != &cx.entity()) + .flat_map(|editor| editor.read(cx).buffer().read(cx).all_buffers()) + .filter(|buffer| { + let buffer = buffer.read(cx); + let buffer_id = buffer.remote_id(); + if already_used_buffers.insert(buffer_id) { + if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx)) { + return !edited_buffer_ids.contains(&buffer_id) + && !edited_worktree_ids.contains(&worktree_id); + } + } + false + }) + .collect::>(); - let Ok(mut pull_diagnostics_tasks) = cx.update(|_, cx| { - buffers - .into_iter() - .filter_map(|buffer| { - project - .update(cx, |project, cx| { - project.lsp_store().update(cx, |lsp_store, cx| { - lsp_store.pull_diagnostics_for_buffer(buffer, cx) + let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); + let make_spawn = |buffers: Vec>, delay: Duration| { + if buffers.is_empty() { + return Task::ready(()); + } + let project_weak = project.clone(); + cx.spawn_in(window, async move |_, cx| { + cx.background_executor().timer(delay).await; + + let Ok(mut pull_diagnostics_tasks) = cx.update(|_, cx| { + buffers + .into_iter() + .filter_map(|buffer| { + project_weak + .update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.pull_diagnostics_for_buffer(buffer, cx) + }) }) - }) - .ok() - }) - .collect::>() - }) else { - return; - }; + .ok() + }) + .collect::>() + }) else { + return; + }; - while let Some(pull_task) = 
pull_diagnostics_tasks.next().await { - match pull_task { - Ok(()) => { - if editor - .update_in(cx, |editor, window, cx| { - editor.update_diagnostics_state(window, cx); - }) - .is_err() - { - return; - } + while let Some(pull_task) = pull_diagnostics_tasks.next().await { + if let Err(e) = pull_task { + log::error!("Failed to update project diagnostics: {e:#}"); } - Err(e) => log::error!("Failed to update project diagnostics: {e:#}"), } - } - }); + }) + }; + + self.pull_diagnostics_task = make_spawn(edited_buffers, debounce); + self.pull_diagnostics_background_task = make_spawn(related_open_buffers, debounce * 2); Some(()) } @@ -18451,7 +19059,7 @@ impl Editor { if self.buffer.read(cx).is_singleton() { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let has_folds = display_map - .folds_in_range(0..display_map.buffer_snapshot().len()) + .folds_in_range(MultiBufferOffset(0)..display_map.buffer_snapshot().len()) .next() .is_some(); @@ -18654,7 +19262,10 @@ impl Editor { let snapshot = self.buffer.read(cx).snapshot(cx); let ranges = snapshot - .text_object_ranges(0..snapshot.len(), TreeSitterOptions::default()) + .text_object_ranges( + MultiBufferOffset(0)..snapshot.len(), + TreeSitterOptions::default(), + ) .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range)) .collect::>(); @@ -18811,7 +19422,12 @@ impl Editor { ) { if self.buffer.read(cx).is_singleton() { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - self.unfold_ranges(&[0..display_map.buffer_snapshot().len()], true, true, cx); + self.unfold_ranges( + &[MultiBufferOffset(0)..display_map.buffer_snapshot().len()], + true, + true, + cx, + ); } else { self.toggle_fold_multiple_buffers = cx.spawn(async move |editor, cx| { editor @@ -19001,6 +19617,10 @@ impl Editor { self.display_map.read(cx).fold_placeholder.clone() } + pub fn set_use_base_text_line_numbers(&mut self, show: bool, _cx: &mut Context) { + self.use_base_text_line_numbers = show; + } + pub fn set_expand_all_diff_hunks(&mut self, cx: &mut App) { self.buffer.update(cx, |buffer, cx| { buffer.set_all_diff_hunks_expanded(cx); @@ -19236,7 +19856,12 @@ impl Editor { &hunks .map(|hunk| buffer_diff::DiffHunk { buffer_range: hunk.buffer_range, - diff_base_byte_range: hunk.diff_base_byte_range, + // We don't need to pass in word diffs here because they're only used for rendering and + // this function changes internal state + base_word_diffs: Vec::default(), + buffer_word_diffs: Vec::default(), + diff_base_byte_range: hunk.diff_base_byte_range.start.0 + ..hunk.diff_base_byte_range.end.0, secondary_status: hunk.secondary_status, range: Point::zero()..Point::zero(), // unused }) @@ -19274,6 +19899,16 @@ impl Editor { }) } + fn has_any_expanded_diff_hunks(&self, cx: &App) -> bool { + if self.buffer.read(cx).all_diff_hunks_expanded() { + return true; + } + let ranges = vec![Anchor::min()..Anchor::max()]; + self.buffer + .read(cx) + .has_expanded_diff_hunks_in_ranges(&ranges, cx) + } + fn toggle_diff_hunks_in_ranges( &mut self, ranges: Vec>, @@ -19653,8 +20288,11 @@ impl Editor { self.style = Some(style); } - pub fn style(&self) -> Option<&EditorStyle> { - self.style.as_ref() + pub fn style(&mut self, cx: &App) -> &EditorStyle { + if self.style.is_none() { + self.style = Some(self.create_style(cx)); + } + self.style.as_ref().unwrap() } // Called by the element. 
This method is not designed to be called outside of the editor @@ -19724,6 +20362,20 @@ impl Editor { self.show_indent_guides } + pub fn disable_indent_guides_for_buffer( + &mut self, + buffer_id: BufferId, + cx: &mut Context, + ) { + self.buffers_with_disabled_indent_guides.insert(buffer_id); + cx.notify(); + } + + pub fn has_indent_guides_disabled_for_buffer(&self, buffer_id: BufferId) -> bool { + self.buffers_with_disabled_indent_guides + .contains(&buffer_id) + } + pub fn toggle_line_numbers( &mut self, _: &ToggleLineNumbers, @@ -20021,18 +20673,20 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - if let Some(file) = self.target_file(cx) - && let Some(file_stem) = file.path().file_stem() - { + if let Some(file_stem) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let file = buffer.read(cx).file()?; + file.path().file_stem() + }) { cx.write_to_clipboard(ClipboardItem::new_string(file_stem.to_string())); } } pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { - if let Some(file) = self.target_file(cx) - && let Some(name) = file.path().file_name() - { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); + if let Some(file_name) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let file = buffer.read(cx).file()?; + Some(file.file_name(cx)) + }) { + cx.write_to_clipboard(ClipboardItem::new_string(file_name.to_string())); } } @@ -20235,9 +20889,22 @@ impl Editor { buffer_ranges.last() }?; - let selection = text::ToPoint::to_point(&range.start, buffer).row - ..text::ToPoint::to_point(&range.end, buffer).row; - Some((multi_buffer.buffer(buffer.remote_id()).unwrap(), selection)) + let start_row_in_buffer = text::ToPoint::to_point(&range.start, buffer).row; + let end_row_in_buffer = text::ToPoint::to_point(&range.end, buffer).row; + + let Some(buffer_diff) = multi_buffer.diff_for(buffer.remote_id()) else { + let selection = start_row_in_buffer..end_row_in_buffer; + + return Some((multi_buffer.buffer(buffer.remote_id()).unwrap(), selection)); + }; + + let buffer_diff_snapshot = buffer_diff.read(cx).snapshot(cx); + + Some(( + multi_buffer.buffer(buffer.remote_id()).unwrap(), + buffer_diff_snapshot.row_to_base_text_row(start_row_in_buffer, buffer) + ..buffer_diff_snapshot.row_to_base_text_row(end_row_in_buffer, buffer), + )) }); let Some((buffer, selection)) = buffer_and_selection else { @@ -20306,9 +20973,14 @@ impl Editor { .start .row + 1; - if let Some(file) = self.target_file(cx) { - let path = file.path().display(file.path_style(cx)); - cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); + if let Some(file_location) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let project = self.project()?.read(cx); + let file = buffer.read(cx).file()?; + let path = file.path().display(project.path_style(cx)); + + Some(format!("{path}:{selection}")) + }) { + cx.write_to_clipboard(ClipboardItem::new_string(file_location)); } } @@ -20429,6 +21101,7 @@ impl Editor { locations, format!("Selections for '{title}'"), false, + false, MultibufferSelectionMode::All, window, cx, @@ -20584,8 +21257,7 @@ impl Editor { let start = highlight.range.start.to_display_point(&snapshot); let end = highlight.range.end.to_display_point(&snapshot); let start_row = start.row().0; - let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX - && end.column() == 0 + let end_row = if !highlight.range.end.text_anchor.is_max() && end.column() == 0 { end.row().0.saturating_sub(1) } else { @@ -20632,7 +21304,7 @@ impl 
Editor { pub fn set_search_within_ranges(&mut self, ranges: &[Range], cx: &mut Context) { self.highlight_background::( ranges, - |colors| colors.colors().editor_document_highlight_read_background, + |_, colors| colors.colors().editor_document_highlight_read_background, cx, ) } @@ -20648,12 +21320,12 @@ impl Editor { pub fn highlight_background( &mut self, ranges: &[Range], - color_fetcher: fn(&Theme) -> Hsla, + color_fetcher: impl Fn(&usize, &Theme) -> Hsla + Send + Sync + 'static, cx: &mut Context, ) { self.background_highlights.insert( HighlightKey::Type(TypeId::of::()), - (color_fetcher, Arc::from(ranges)), + (Arc::new(color_fetcher), Arc::from(ranges)), ); self.scrollbar_marker_state.dirty = true; cx.notify(); @@ -20663,12 +21335,12 @@ impl Editor { &mut self, key: usize, ranges: &[Range], - color_fetcher: fn(&Theme) -> Hsla, + color_fetcher: impl Fn(&usize, &Theme) -> Hsla + Send + Sync + 'static, cx: &mut Context, ) { self.background_highlights.insert( HighlightKey::TypePlus(TypeId::of::(), key), - (color_fetcher, Arc::from(ranges)), + (Arc::new(color_fetcher), Arc::from(ranges)), ); self.scrollbar_marker_state.dirty = true; cx.notify(); @@ -20798,7 +21470,7 @@ impl Editor { ) -> Vec<(Range, Hsla)> { let snapshot = self.snapshot(window, cx); let buffer = &snapshot.buffer_snapshot(); - let start = buffer.anchor_before(0); + let start = buffer.anchor_before(MultiBufferOffset(0)); let end = buffer.anchor_after(buffer.len()); self.sorted_background_highlights_in_range(start..end, &snapshot, cx.theme()) } @@ -20893,7 +21565,6 @@ impl Editor { ) -> Vec<(Range, Hsla)> { let mut results = Vec::new(); for (color_fetcher, ranges) in self.background_highlights.values() { - let color = color_fetcher(theme); let start_ix = match ranges.binary_search_by(|probe| { let cmp = probe .end @@ -20906,7 +21577,7 @@ impl Editor { }) { Ok(i) | Err(i) => i, }; - for range in &ranges[start_ix..] 
{ + for (index, range) in ranges[start_ix..].iter().enumerate() { if range .start .cmp(&search_range.end, &display_snapshot.buffer_snapshot()) @@ -20915,6 +21586,7 @@ impl Editor { break; } + let color = color_fetcher(&(start_ix + index), theme); let start = range.start.to_display_point(display_snapshot); let end = range.end.to_display_point(display_snapshot); results.push((start..end, color)) @@ -20997,13 +21669,16 @@ impl Editor { key: usize, ranges: Vec>, style: HighlightStyle, + merge: bool, cx: &mut Context, ) { - self.display_map.update(cx, |map, _| { + self.display_map.update(cx, |map, cx| { map.highlight_text( HighlightKey::TypePlus(TypeId::of::(), key), ranges, style, + merge, + cx, ); }); cx.notify(); @@ -21015,8 +21690,14 @@ impl Editor { style: HighlightStyle, cx: &mut Context, ) { - self.display_map.update(cx, |map, _| { - map.highlight_text(HighlightKey::Type(TypeId::of::()), ranges, style) + self.display_map.update(cx, |map, cx| { + map.highlight_text( + HighlightKey::Type(TypeId::of::()), + ranges, + style, + false, + cx, + ) }); cx.notify(); } @@ -21132,7 +21813,7 @@ impl Editor { .for_each(|hint| { let inlay = Inlay::debugger( post_inc(&mut editor.next_inlay_id), - Anchor::in_buffer(excerpt_id, buffer_id, hint.position), + Anchor::in_buffer(excerpt_id, hint.position), hint.text(), ); if !inlay.text().chars().contains(&'\n') { @@ -21164,7 +21845,6 @@ impl Editor { self.active_indent_guides_state.dirty = true; self.refresh_active_diagnostics(cx); self.refresh_code_actions(window, cx); - self.refresh_selected_text_highlights(true, window, cx); self.refresh_single_line_folds(window, cx); self.refresh_matching_bracket_highlights(window, cx); if self.has_active_edit_prediction() { @@ -21220,6 +21900,7 @@ impl Editor { } self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + self.colorize_brackets(false, cx); cx.emit(EditorEvent::ExcerptsAdded { buffer: buffer.clone(), predecessor: *predecessor, @@ -21257,10 +21938,16 @@ impl Editor { multi_buffer::Event::ExcerptsExpanded { ids } => { self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); self.refresh_document_highlights(cx); + for id in ids { + self.fetched_tree_sitter_chunks.remove(id); + } + self.colorize_brackets(false, cx); cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) } multi_buffer::Event::Reparsed(buffer_id) => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.refresh_selected_text_highlights(true, window, cx); + self.colorize_brackets(true, cx); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::Reparsed(*buffer_id)); @@ -21268,8 +21955,10 @@ impl Editor { multi_buffer::Event::DiffHunksToggled => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); } - multi_buffer::Event::LanguageChanged(buffer_id) => { - self.registered_buffers.remove(&buffer_id); + multi_buffer::Event::LanguageChanged(buffer_id, is_fresh_language) => { + if !is_fresh_language { + self.registered_buffers.remove(&buffer_id); + } jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::Reparsed(*buffer_id)); cx.notify(); @@ -21331,7 +22020,69 @@ impl Editor { cx.notify(); } + fn fetch_accent_data(&self, cx: &App) -> Option { + if !self.mode.is_full() { + return None; + } + + let theme_settings = theme::ThemeSettings::get_global(cx); + let theme = cx.theme(); + let accent_colors = theme.accents().clone(); + + let accent_overrides = 
theme_settings + .theme_overrides + .get(theme.name.as_ref()) + .map(|theme_style| &theme_style.accents) + .into_iter() + .flatten() + .chain( + theme_settings + .experimental_theme_overrides + .as_ref() + .map(|overrides| &overrides.accents) + .into_iter() + .flatten(), + ) + .flat_map(|accent| accent.0.clone()) + .collect(); + + Some(AccentData { + colors: accent_colors, + overrides: accent_overrides, + }) + } + + fn fetch_applicable_language_settings( + &self, + cx: &App, + ) -> HashMap, LanguageSettings> { + if !self.mode.is_full() { + return HashMap::default(); + } + + self.buffer().read(cx).all_buffers().into_iter().fold( + HashMap::default(), + |mut acc, buffer| { + let buffer = buffer.read(cx); + let language = buffer.language().map(|language| language.name()); + if let hash_map::Entry::Vacant(v) = acc.entry(language.clone()) { + let file = buffer.file(); + v.insert(language_settings(language, file, cx).into_owned()); + } + acc + }, + ) + } + fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { + let new_language_settings = self.fetch_applicable_language_settings(cx); + let language_settings_changed = new_language_settings != self.applicable_language_settings; + self.applicable_language_settings = new_language_settings; + + let new_accents = self.fetch_accent_data(cx); + let accents_changed = new_accents != self.accent_data; + self.accent_data = new_accents; + if self.diagnostics_enabled() { let new_severity = EditorSettings::get_global(cx) .diagnostics_max_severity @@ -21403,15 +22154,19 @@ impl Editor { }) } } - } - if let Some(inlay_splice) = self.colors.as_mut().and_then(|colors| { - colors.render_mode_updated(EditorSettings::get_global(cx).lsp_document_colors) - }) { - if !inlay_splice.is_empty() { - self.splice_inlays(&inlay_splice.to_remove, inlay_splice.to_insert, cx); + if language_settings_changed || accents_changed { + self.colorize_brackets(true, cx); + } + + if let Some(inlay_splice) = self.colors.as_mut().and_then(|colors| { + colors.render_mode_updated(EditorSettings::get_global(cx).lsp_document_colors) + }) { + if !inlay_splice.is_empty() { + self.splice_inlays(&inlay_splice.to_remove, inlay_splice.to_insert, cx); + } + self.refresh_colors_for_visible_range(None, window, cx); } - self.refresh_colors_for_visible_range(None, window, cx); } cx.notify(); @@ -21478,7 +22233,7 @@ impl Editor { new_selections_by_buffer.insert( buffer, ( - vec![jump_to_offset..jump_to_offset], + vec![BufferOffset(jump_to_offset)..BufferOffset(jump_to_offset)], Some(*line_offset_from_top), ), ); @@ -21497,11 +22252,13 @@ impl Editor { .entry(buffer) .or_insert((Vec::new(), Some(*line_offset_from_top))) .0 - .push(buffer_offset..buffer_offset) + .push(BufferOffset(buffer_offset)..BufferOffset(buffer_offset)) } } None => { - let selections = self.selections.all::(&self.display_snapshot(cx)); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); let multi_buffer = self.buffer.read(cx); for selection in selections { for (snapshot, range, _, anchor) in multi_buffer @@ -21517,7 +22274,7 @@ impl Editor { &anchor.text_anchor, &buffer_handle.read(cx).snapshot(), ); - let range = offset..offset; + let range = BufferOffset(offset)..BufferOffset(offset); new_selections_by_buffer .entry(buffer_handle) .or_insert((Vec::new(), None)) @@ -21558,54 +22315,87 @@ impl Editor { }; for (buffer, (ranges, scroll_offset)) in new_selections_by_buffer { - let editor = buffer - .read(cx) - .file() - .is_none() + let buffer_read = buffer.read(cx); + let (has_file, 
is_project_file) = if let Some(file) = buffer_read.file() { + (true, project::File::from_dyn(Some(file)).is_some()) + } else { + (false, false) + }; + + // If the project file is None, workspace.open_project_item will fail to open the excerpt + // in a pre-existing workspace item if one exists, because the Buffer entity_id will be None, + // so we first check whether a matching tab already exists in the pane + let editor = (!has_file || !is_project_file) .then(|| { // Handle file-less buffers separately: those are not really the project items, so won't have a project path or entity id, // so `workspace.open_project_item` will never find them, always opening a new editor. // Instead, we try to activate the existing editor in the pane first. - let (editor, pane_item_index) = + let (editor, pane_item_index, pane_item_id) = pane.read(cx).items().enumerate().find_map(|(i, item)| { let editor = item.downcast::()?; let singleton_buffer = editor.read(cx).buffer().read(cx).as_singleton()?; if singleton_buffer == buffer { - Some((editor, i)) + Some((editor, i, item.item_id())) } else { None } })?; pane.update(cx, |pane, cx| { - pane.activate_item(pane_item_index, true, true, window, cx) + pane.activate_item(pane_item_index, true, true, window, cx); + if !PreviewTabsSettings::get_global(cx) + .enable_preview_from_multibuffer + { + pane.unpreview_item_if_preview(pane_item_id); + } }); Some(editor) }) .flatten() .unwrap_or_else(|| { + let keep_old_preview = PreviewTabsSettings::get_global(cx) + .enable_keep_preview_on_code_navigation; + let allow_new_preview = + PreviewTabsSettings::get_global(cx).enable_preview_from_multibuffer; workspace.open_project_item::( pane.clone(), buffer, true, true, + keep_old_preview, + allow_new_preview, window, cx, ) }); editor.update(cx, |editor, cx| { + if has_file && !is_project_file { + editor.set_read_only(true); + } let autoscroll = match scroll_offset { Some(scroll_offset) => Autoscroll::top_relative(scroll_offset as usize), None => Autoscroll::newest(), }; let nav_history = editor.nav_history.take(); + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let Some((&excerpt_id, _, buffer_snapshot)) = + multibuffer_snapshot.as_singleton() + else { + return; + }; editor.change_selections( SelectionEffects::scroll(autoscroll), window, cx, |s| { - s.select_ranges(ranges); + s.select_ranges(ranges.into_iter().map(|range| { + let range = buffer_snapshot.anchor_before(range.start) + ..buffer_snapshot.anchor_after(range.end); + multibuffer_snapshot + .anchor_range_in_excerpt(excerpt_id, range) + .unwrap() + })); }, ); editor.nav_history = nav_history; @@ -21615,13 +22405,14 @@ impl Editor { }); } - // For now, don't allow opening excerpts in buffers that aren't backed by - // regular project files. + // Allow opening excerpts for buffers that either belong to the current project + // or represent synthetic/non-local files (e.g., git blobs). File-less buffers + // are also supported so tests and other in-memory views keep working. 
fn can_open_excerpts_in_file(file: Option<&Arc>) -> bool { - file.is_none_or(|file| project::File::from_dyn(Some(file)).is_some()) + file.is_none_or(|file| project::File::from_dyn(Some(file)).is_some() || !file.is_local()) } - fn marked_text_ranges(&self, cx: &App) -> Option>> { + fn marked_text_ranges(&self, cx: &App) -> Option>> { let snapshot = self.buffer.read(cx).read(cx); let (_, ranges) = self.text_highlights::(cx)?; Some( @@ -21636,25 +22427,25 @@ impl Editor { fn selection_replacement_ranges( &self, - range: Range, + range: Range, cx: &mut App, - ) -> Vec> { + ) -> Vec> { let selections = self .selections - .all::(&self.display_snapshot(cx)); + .all::(&self.display_snapshot(cx)); let newest_selection = selections .iter() .max_by_key(|selection| selection.id) .unwrap(); - let start_delta = range.start.0 as isize - newest_selection.start.0 as isize; - let end_delta = range.end.0 as isize - newest_selection.end.0 as isize; + let start_delta = range.start.0.0 as isize - newest_selection.start.0.0 as isize; + let end_delta = range.end.0.0 as isize - newest_selection.end.0.0 as isize; let snapshot = self.buffer.read(cx).read(cx); selections .into_iter() .map(|mut selection| { - selection.start.0 = - (selection.start.0 as isize).saturating_add(start_delta) as usize; - selection.end.0 = (selection.end.0 as isize).saturating_add(end_delta) as usize; + selection.start.0.0 = + (selection.start.0.0 as isize).saturating_add(start_delta) as usize; + selection.end.0.0 = (selection.end.0.0 as isize).saturating_add(end_delta) as usize; snapshot.clip_offset_utf16(selection.start, Bias::Left) ..snapshot.clip_offset_utf16(selection.end, Bias::Right) }) @@ -21685,7 +22476,9 @@ impl Editor { .and_then(|e| e.to_str()) .map(|a| a.to_string())); - let vim_mode = vim_flavor(cx).is_some(); + let vim_mode = vim_mode_setting::VimModeSetting::try_get(cx) + .map(|vim_mode| vim_mode.0) + .unwrap_or(false); let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider; let copilot_enabled = edit_predictions_provider @@ -21745,12 +22538,16 @@ impl Editor { None } else { Some( - snapshot.offset_utf16_to_offset(OffsetUtf16(selection.range.start)) - ..snapshot.offset_utf16_to_offset(OffsetUtf16(selection.range.end)), + snapshot.offset_utf16_to_offset(MultiBufferOffsetUtf16(OffsetUtf16( + selection.range.start, + ))) + ..snapshot.offset_utf16_to_offset(MultiBufferOffsetUtf16(OffsetUtf16( + selection.range.end, + ))), ) } }) - .unwrap_or_else(|| 0..snapshot.len()); + .unwrap_or_else(|| MultiBufferOffset(0)..snapshot.len()); let chunks = snapshot.chunks(range, true); let mut lines = Vec::new(); @@ -21828,21 +22625,23 @@ impl Editor { if let Some(relative_utf16_range) = relative_utf16_range { let selections = self .selections - .all::(&self.display_snapshot(cx)); + .all::(&self.display_snapshot(cx)); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { let new_ranges = selections.into_iter().map(|range| { - let start = OffsetUtf16( + let start = MultiBufferOffsetUtf16(OffsetUtf16( range .head() .0 + .0 .saturating_add_signed(relative_utf16_range.start), - ); - let end = OffsetUtf16( + )); + let end = MultiBufferOffsetUtf16(OffsetUtf16( range .head() .0 + .0 .saturating_add_signed(relative_utf16_range.end), - ); + )); start..end }); s.select_ranges(new_ranges); @@ -21864,7 +22663,7 @@ impl Editor { .take() .and_then(|descendant| descendant.upgrade()) { - window.focus(&descendant); + window.focus(&descendant, cx); } else { if let Some(blame) = self.blame.as_ref() { 
blame.update(cx, GitBlame::focus) @@ -21883,6 +22682,20 @@ impl Editor { ); } }); + + if let Some(position_map) = self.last_position_map.clone() { + EditorElement::mouse_moved( + self, + &MouseMoveEvent { + position: window.mouse_position(), + pressed_button: None, + modifiers: window.modifiers(), + }, + &position_map, + window, + cx, + ); + } } } @@ -21932,13 +22745,7 @@ impl Editor { .pending_input_keystrokes() .into_iter() .flatten() - .filter_map(|keystroke| { - if keystroke.modifiers.is_subset_of(&Modifiers::shift()) { - keystroke.key_char.clone() - } else { - None - } - }) + .filter_map(|keystroke| keystroke.key_char.clone()) .collect(); if !self.input_enabled || self.read_only || !self.focus_handle.is_focused(window) { @@ -21953,7 +22760,9 @@ impl Editor { } let transaction = self.transact(window, cx, |this, window, cx| { - let selections = this.selections.all::(&this.display_snapshot(cx)); + let selections = this + .selections + .all::(&this.display_snapshot(cx)); let edits = selections .iter() .map(|selection| (selection.end..selection.end, pending.clone())); @@ -21972,7 +22781,7 @@ impl Editor { let snapshot = self.snapshot(window, cx); let ranges = self .selections - .all::(&snapshot.display_snapshot) + .all::(&snapshot.display_snapshot) .into_iter() .map(|selection| { snapshot.buffer_snapshot().anchor_after(selection.end) @@ -22061,10 +22870,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let workspace = self.workspace(); - let project = self.project(); - let save_tasks = self.buffer().update(cx, |multi_buffer, cx| { - let mut tasks = Vec::new(); + self.buffer().update(cx, |multi_buffer, cx| { for (buffer_id, changes) in revert_changes { if let Some(buffer) = multi_buffer.buffer(buffer_id) { buffer.update(cx, |buffer, cx| { @@ -22076,66 +22882,33 @@ impl Editor { cx, ); }); - - if let Some(project) = - project.filter(|_| multi_buffer.all_diff_hunks_expanded()) - { - project.update(cx, |project, cx| { - tasks.push((buffer.clone(), project.save_buffer(buffer, cx))); - }) - } } } - tasks }); - cx.spawn_in(window, async move |_, cx| { - for (buffer, task) in save_tasks { - let result = task.await; - if result.is_err() { - let Some(path) = buffer - .read_with(cx, |buffer, cx| buffer.project_path(cx)) - .ok() - else { - continue; - }; - if let Some((workspace, path)) = workspace.as_ref().zip(path) { - let Some(task) = cx - .update_window_entity(workspace, |workspace, window, cx| { - workspace - .open_path_preview(path, None, false, false, false, window, cx) - }) - .ok() - else { - continue; - }; - task.await.log_err(); - } - } - } - }) - .detach(); self.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { selections.refresh() }); } pub fn to_pixel_point( - &self, + &mut self, source: multi_buffer::Anchor, editor_snapshot: &EditorSnapshot, window: &mut Window, + cx: &App, ) -> Option> { let source_point = source.to_display_point(editor_snapshot); - self.display_to_pixel_point(source_point, editor_snapshot, window) + self.display_to_pixel_point(source_point, editor_snapshot, window, cx) } pub fn display_to_pixel_point( - &self, + &mut self, source: DisplayPoint, editor_snapshot: &EditorSnapshot, window: &mut Window, + cx: &App, ) -> Option> { - let line_height = self.style()?.text.line_height_in_pixels(window.rem_size()); + let line_height = self.style(cx).text.line_height_in_pixels(window.rem_size()); let text_layout_details = self.text_layout_details(window); let scroll_top = text_layout_details .scroll_anchor @@ -22212,7 +22985,8 @@ impl Editor 
{ ) { if self.buffer_kind(cx) == ItemBufferKind::Singleton && !self.mode.is_minimap() - && WorkspaceSettings::get(None, cx).restore_on_startup != RestoreOnStartupBehavior::None + && WorkspaceSettings::get(None, cx).restore_on_startup + != RestoreOnStartupBehavior::EmptyTab { let buffer_snapshot = OnceCell::new(); @@ -22224,8 +22998,8 @@ impl Editor { folds .into_iter() .map(|(start, end)| { - snapshot.clip_offset(start, Bias::Left) - ..snapshot.clip_offset(end, Bias::Right) + snapshot.clip_offset(MultiBufferOffset(start), Bias::Left) + ..snapshot.clip_offset(MultiBufferOffset(end), Bias::Right) }) .collect(), false, @@ -22242,8 +23016,8 @@ impl Editor { self.selection_history.mode = SelectionHistoryMode::Skipping; self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges(selections.into_iter().map(|(start, end)| { - snapshot.clip_offset(start, Bias::Left) - ..snapshot.clip_offset(end, Bias::Right) + snapshot.clip_offset(MultiBufferOffset(start), Bias::Left) + ..snapshot.clip_offset(MultiBufferOffset(end), Bias::Right) })); }); self.selection_history.mode = SelectionHistoryMode::Normal; @@ -22267,7 +23041,7 @@ impl Editor { if self.ignore_lsp_data() { return; } - for (_, (visible_buffer, _, _)) in self.visible_excerpts(cx) { + for (_, (visible_buffer, _, _)) in self.visible_excerpts(true, cx) { self.register_buffer(visible_buffer.read(cx).remote_id(), cx); } } @@ -22298,48 +23072,77 @@ impl Editor { // skip any LSP updates for it. self.active_diagnostics == ActiveDiagnostic::All || !self.mode().is_full() } -} - -fn edit_for_markdown_paste<'a>( - buffer: &MultiBufferSnapshot, - range: Range, - to_insert: &'a str, - url: Option, -) -> (Range, Cow<'a, str>) { - if url.is_none() { - return (range, Cow::Borrowed(to_insert)); - }; - let old_text = buffer.text_for_range(range.clone()).collect::(); + fn create_style(&self, cx: &App) -> EditorStyle { + let settings = ThemeSettings::get_global(cx); - let new_text = if range.is_empty() || url::Url::parse(&old_text).is_ok() { - Cow::Borrowed(to_insert) - } else { - Cow::Owned(format!("[{old_text}]({to_insert})")) - }; - (range, new_text) -} + let mut text_style = match self.mode { + EditorMode::SingleLine | EditorMode::AutoHeight { .. } => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_fallbacks: settings.ui_font.fallbacks.clone(), + font_size: rems(0.875).into(), + font_weight: settings.ui_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }, + EditorMode::Full { .. } | EditorMode::Minimap { .. } => TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }, + }; + if let Some(text_style_refinement) = &self.text_style_refinement { + text_style.refine(text_style_refinement) + } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum VimFlavor { - Vim, - Helix, + let background = match self.mode { + EditorMode::SingleLine => cx.theme().system().transparent, + EditorMode::AutoHeight { .. } => cx.theme().system().transparent, + EditorMode::Full { .. 
} => cx.theme().colors().editor_background, + EditorMode::Minimap { .. } => cx.theme().colors().editor_background.opacity(0.7), + }; + + EditorStyle { + background, + border: cx.theme().colors().border, + local_player: cx.theme().players().local(), + text: text_style, + scrollbar_width: EditorElement::SCROLLBAR_WIDTH, + syntax: cx.theme().syntax().clone(), + status: cx.theme().status().clone(), + inlay_hints_style: make_inlay_hints_style(cx), + edit_prediction_styles: make_suggestion_styles(cx), + unnecessary_code_fade: settings.unnecessary_code_fade, + show_underlines: self.diagnostics_enabled(), + } + } } -pub fn vim_flavor(cx: &App) -> Option { - if vim_mode_setting::HelixModeSetting::try_get(cx) - .map(|helix_mode| helix_mode.0) - .unwrap_or(false) - { - Some(VimFlavor::Helix) - } else if vim_mode_setting::VimModeSetting::try_get(cx) - .map(|vim_mode| vim_mode.0) - .unwrap_or(false) - { - Some(VimFlavor::Vim) +fn edit_for_markdown_paste<'a>( + buffer: &MultiBufferSnapshot, + range: Range, + to_insert: &'a str, + url: Option, +) -> (Range, Cow<'a, str>) { + if url.is_none() { + return (range, Cow::Borrowed(to_insert)); + }; + + let old_text = buffer.text_for_range(range.clone()).collect::(); + + let new_text = if range.is_empty() || url::Url::parse(&old_text).is_ok() { + Cow::Borrowed(to_insert) } else { - None // neither vim nor helix mode - } + Cow::Owned(format!("[{old_text}]({to_insert})")) + }; + (range, new_text) } fn process_completion_for_edit( @@ -22477,83 +23280,270 @@ fn process_completion_for_edit( range_to_replace.end = *cursor_position; } + let replace_range = range_to_replace.to_offset(buffer); CompletionEdit { new_text, - replace_range: range_to_replace.to_offset(buffer), + replace_range: BufferOffset(replace_range.start)..BufferOffset(replace_range.end), snippet, } } struct CompletionEdit { new_text: String, - replace_range: Range, + replace_range: Range, snippet: Option, } -fn insert_extra_newline_brackets( +fn comment_delimiter_for_newline( + start_point: &Point, buffer: &MultiBufferSnapshot, - range: Range, - language: &language::LanguageScope, -) -> bool { - let leading_whitespace_len = buffer - .reversed_chars_at(range.start) - .take_while(|c| c.is_whitespace() && *c != '\n') - .map(|c| c.len_utf8()) - .sum::(); - let trailing_whitespace_len = buffer - .chars_at(range.end) - .take_while(|c| c.is_whitespace() && *c != '\n') - .map(|c| c.len_utf8()) - .sum::(); - let range = range.start - leading_whitespace_len..range.end + trailing_whitespace_len; - - language.brackets().any(|(pair, enabled)| { - let pair_start = pair.start.trim_end(); - let pair_end = pair.end.trim_start(); - - enabled - && pair.newline - && buffer.contains_str_at(range.end, pair_end) - && buffer.contains_str_at(range.start.saturating_sub(pair_start.len()), pair_start) - }) + language: &LanguageScope, +) -> Option> { + let delimiters = language.line_comment_prefixes(); + let max_len_of_delimiter = delimiters.iter().map(|delimiter| delimiter.len()).max()?; + let (snapshot, range) = buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; + + let num_of_whitespaces = snapshot + .chars_for_range(range.clone()) + .take_while(|c| c.is_whitespace()) + .count(); + let comment_candidate = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(max_len_of_delimiter) + .collect::(); + let (delimiter, trimmed_len) = delimiters + .iter() + .filter_map(|delimiter| { + let prefix = delimiter.trim_end(); + if comment_candidate.starts_with(prefix) { + Some((delimiter, prefix.len())) + } 
else { + None + } + }) + .max_by_key(|(_, len)| *len)?; + + if let Some(BlockCommentConfig { + start: block_start, .. + }) = language.block_comment() + { + let block_start_trimmed = block_start.trim_end(); + if block_start_trimmed.starts_with(delimiter.trim_end()) { + let line_content = snapshot + .chars_for_range(range) + .skip(num_of_whitespaces) + .take(block_start_trimmed.len()) + .collect::(); + + if line_content.starts_with(block_start_trimmed) { + return None; + } + } + } + + let cursor_is_placed_after_comment_marker = + num_of_whitespaces + trimmed_len <= start_point.column as usize; + if cursor_is_placed_after_comment_marker { + Some(delimiter.clone()) + } else { + None + } } -fn insert_extra_newline_tree_sitter(buffer: &MultiBufferSnapshot, range: Range) -> bool { - let (buffer, range) = match buffer.range_to_buffer_ranges(range).as_slice() { - [(buffer, range, _)] => (*buffer, range.clone()), - _ => return false, +fn documentation_delimiter_for_newline( + start_point: &Point, + buffer: &MultiBufferSnapshot, + language: &LanguageScope, + newline_formatting: &mut NewlineFormatting, +) -> Option> { + let BlockCommentConfig { + start: start_tag, + end: end_tag, + prefix: delimiter, + tab_size: len, + } = language.documentation_comment()?; + let is_within_block_comment = buffer + .language_scope_at(*start_point) + .is_some_and(|scope| scope.override_name() == Some("comment")); + if !is_within_block_comment { + return None; + } + + let (snapshot, range) = buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; + + let num_of_whitespaces = snapshot + .chars_for_range(range.clone()) + .take_while(|c| c.is_whitespace()) + .count(); + + // It is safe to use a column from MultiBufferPoint in context of a single buffer ranges, because we're only ever looking at a single line at a time. 
+ let column = start_point.column; + let cursor_is_after_start_tag = { + let start_tag_len = start_tag.len(); + let start_tag_line = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(start_tag_len) + .collect::(); + if start_tag_line.starts_with(start_tag.as_ref()) { + num_of_whitespaces + start_tag_len <= column as usize + } else { + false + } + }; + + let cursor_is_after_delimiter = { + let delimiter_trim = delimiter.trim_end(); + let delimiter_line = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(delimiter_trim.len()) + .collect::(); + if delimiter_line.starts_with(delimiter_trim) { + num_of_whitespaces + delimiter_trim.len() <= column as usize + } else { + false + } }; - let pair = { - let mut result: Option = None; - for pair in buffer - .all_bracket_ranges(range.clone()) - .filter(move |pair| { - pair.open_range.start <= range.start && pair.close_range.end >= range.end - }) - { - let len = pair.close_range.end - pair.open_range.start; + let cursor_is_before_end_tag_if_exists = { + let mut char_position = 0u32; + let mut end_tag_offset = None; - if let Some(existing) = &result { - let existing_len = existing.close_range.end - existing.open_range.start; - if len > existing_len { - continue; + 'outer: for chunk in snapshot.text_for_range(range) { + if let Some(byte_pos) = chunk.find(&**end_tag) { + let chars_before_match = chunk[..byte_pos].chars().count() as u32; + end_tag_offset = Some(char_position + chars_before_match); + break 'outer; + } + char_position += chunk.chars().count() as u32; + } + + if let Some(end_tag_offset) = end_tag_offset { + let cursor_is_before_end_tag = column <= end_tag_offset; + if cursor_is_after_start_tag { + if cursor_is_before_end_tag { + newline_formatting.insert_extra_newline = true; + } + let cursor_is_at_start_of_end_tag = column == end_tag_offset; + if cursor_is_at_start_of_end_tag { + newline_formatting.indent_on_extra_newline.len = *len; } } + cursor_is_before_end_tag + } else { + true + } + }; - result = Some(pair); + if (cursor_is_after_start_tag || cursor_is_after_delimiter) + && cursor_is_before_end_tag_if_exists + { + if cursor_is_after_start_tag { + newline_formatting.indent_on_newline.len = *len; } + Some(delimiter.clone()) + } else { + None + } +} - result - }; - let Some(pair) = pair else { - return false; - }; - pair.newline_only - && buffer - .chars_for_range(pair.open_range.end..range.start) - .chain(buffer.chars_for_range(range.end..pair.close_range.start)) - .all(|c| c.is_whitespace() && c != '\n') +#[derive(Debug, Default)] +struct NewlineFormatting { + insert_extra_newline: bool, + indent_on_newline: IndentSize, + indent_on_extra_newline: IndentSize, +} + +impl NewlineFormatting { + fn new( + buffer: &MultiBufferSnapshot, + range: Range, + language: &LanguageScope, + ) -> Self { + Self { + insert_extra_newline: Self::insert_extra_newline_brackets( + buffer, + range.clone(), + language, + ) || Self::insert_extra_newline_tree_sitter(buffer, range), + indent_on_newline: IndentSize::spaces(0), + indent_on_extra_newline: IndentSize::spaces(0), + } + } + + fn insert_extra_newline_brackets( + buffer: &MultiBufferSnapshot, + range: Range, + language: &language::LanguageScope, + ) -> bool { + let leading_whitespace_len = buffer + .reversed_chars_at(range.start) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + let trailing_whitespace_len = buffer + .chars_at(range.end) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) 
+ .sum::(); + let range = range.start - leading_whitespace_len..range.end + trailing_whitespace_len; + + language.brackets().any(|(pair, enabled)| { + let pair_start = pair.start.trim_end(); + let pair_end = pair.end.trim_start(); + + enabled + && pair.newline + && buffer.contains_str_at(range.end, pair_end) + && buffer.contains_str_at( + range.start.saturating_sub_usize(pair_start.len()), + pair_start, + ) + }) + } + + fn insert_extra_newline_tree_sitter( + buffer: &MultiBufferSnapshot, + range: Range, + ) -> bool { + let (buffer, range) = match buffer.range_to_buffer_ranges(range).as_slice() { + [(buffer, range, _)] => (*buffer, range.clone()), + _ => return false, + }; + let pair = { + let mut result: Option> = None; + + for pair in buffer + .all_bracket_ranges(range.start.0..range.end.0) + .filter(move |pair| { + pair.open_range.start <= range.start.0 && pair.close_range.end >= range.end.0 + }) + { + let len = pair.close_range.end - pair.open_range.start; + + if let Some(existing) = &result { + let existing_len = existing.close_range.end - existing.open_range.start; + if len > existing_len { + continue; + } + } + + result = Some(pair); + } + + result + }; + let Some(pair) = pair else { + return false; + }; + pair.newline_only + && buffer + .chars_for_range(pair.open_range.end..range.start.0) + .chain(buffer.chars_for_range(range.end.0..pair.close_range.start)) + .all(|c| c.is_whitespace() && c != '\n') + } } fn update_uncommitted_diff_for_buffer( @@ -23103,7 +24093,6 @@ pub trait CompletionProvider { position: language::Anchor, text: &str, trigger_in_words: bool, - menu_is_open: bool, cx: &mut Context, ) -> bool; @@ -23194,10 +24183,11 @@ impl CodeActionProvider for Entity { fn snippet_completions( project: &Project, buffer: &Entity, - buffer_position: text::Anchor, + buffer_anchor: text::Anchor, + classifier: CharClassifier, cx: &mut App, ) -> Task> { - let languages = buffer.read(cx).languages_at(buffer_position); + let languages = buffer.read(cx).languages_at(buffer_anchor); let snippet_store = project.snippets().read(cx); let scopes: Vec<_> = languages @@ -23226,97 +24216,146 @@ fn snippet_completions( let executor = cx.background_executor().clone(); cx.background_spawn(async move { + let is_word_char = |c| classifier.is_word(c); + let mut is_incomplete = false; let mut completions: Vec = Vec::new(); - for (scope, snippets) in scopes.into_iter() { - let classifier = - CharClassifier::new(Some(scope)).scope_context(Some(CharScopeContext::Completion)); - - const MAX_WORD_PREFIX_LEN: usize = 128; - let last_word: String = snapshot - .reversed_chars_for_range(text::Anchor::MIN..buffer_position) - .take(MAX_WORD_PREFIX_LEN) - .take_while(|c| classifier.is_word(*c)) - .collect::() - .chars() - .rev() - .collect(); - if last_word.is_empty() { - return Ok(CompletionResponse { - completions: vec![], - display_options: CompletionDisplayOptions::default(), - is_incomplete: true, - }); - } + const MAX_PREFIX_LEN: usize = 128; + let buffer_offset = text::ToOffset::to_offset(&buffer_anchor, &snapshot); + let window_start = buffer_offset.saturating_sub(MAX_PREFIX_LEN); + let window_start = snapshot.clip_offset(window_start, Bias::Left); - let as_offset = text::ToOffset::to_offset(&buffer_position, &snapshot); - let to_lsp = |point: &text::Anchor| { - let end = text::ToPointUtf16::to_point_utf16(point, &snapshot); - point_to_lsp(end) - }; - let lsp_end = to_lsp(&buffer_position); + let max_buffer_window: String = snapshot + .text_for_range(window_start..buffer_offset) + .collect(); + + if 
max_buffer_window.is_empty() { + return Ok(CompletionResponse { + completions: vec![], + display_options: CompletionDisplayOptions::default(), + is_incomplete: true, + }); + } - let candidates = snippets + for (_scope, snippets) in scopes.into_iter() { + // Sort snippets by word count to match longer snippet prefixes first. + let mut sorted_snippet_candidates = snippets .iter() .enumerate() - .flat_map(|(ix, snippet)| { + .flat_map(|(snippet_ix, snippet)| { snippet .prefix .iter() - .map(move |prefix| StringMatchCandidate::new(ix, prefix)) + .enumerate() + .map(move |(prefix_ix, prefix)| { + let word_count = + snippet_candidate_suffixes(prefix, is_word_char).count(); + ((snippet_ix, prefix_ix), prefix, word_count) + }) }) - .collect::>(); + .collect_vec(); + sorted_snippet_candidates + .sort_unstable_by_key(|(_, _, word_count)| Reverse(*word_count)); + + // Each prefix may be matched multiple times; the completion menu must filter out duplicates. + + let buffer_windows = snippet_candidate_suffixes(&max_buffer_window, is_word_char) + .take( + sorted_snippet_candidates + .first() + .map(|(_, _, word_count)| *word_count) + .unwrap_or_default(), + ) + .collect_vec(); const MAX_RESULTS: usize = 100; - let mut matches = fuzzy::match_strings( - &candidates, - &last_word, - last_word.chars().any(|c| c.is_uppercase()), - true, - MAX_RESULTS, - &Default::default(), - executor.clone(), - ) - .await; + // Each match also remembers how many characters from the buffer it consumed + let mut matches: Vec<(StringMatch, usize)> = vec![]; + + let mut snippet_list_cutoff_index = 0; + for (buffer_index, buffer_window) in buffer_windows.iter().enumerate().rev() { + let word_count = buffer_index + 1; + // Increase `snippet_list_cutoff_index` until we have all of the + // snippets with sufficiently many words. 
+ while sorted_snippet_candidates + .get(snippet_list_cutoff_index) + .is_some_and(|(_ix, _prefix, snippet_word_count)| { + *snippet_word_count >= word_count + }) + { + snippet_list_cutoff_index += 1; + } - if matches.len() >= MAX_RESULTS { - is_incomplete = true; - } + // Take only the candidates with at least `word_count` many words + let snippet_candidates_at_word_len = + &sorted_snippet_candidates[..snippet_list_cutoff_index]; - // Remove all candidates where the query's start does not match the start of any word in the candidate - if let Some(query_start) = last_word.chars().next() { - matches.retain(|string_match| { - split_words(&string_match.string).any(|word| { - // Check that the first codepoint of the word as lowercase matches the first - // codepoint of the query as lowercase - word.chars() - .flat_map(|codepoint| codepoint.to_lowercase()) - .zip(query_start.to_lowercase()) - .all(|(word_cp, query_cp)| word_cp == query_cp) + let candidates = snippet_candidates_at_word_len + .iter() + .map(|(_snippet_ix, prefix, _snippet_word_count)| prefix) + .enumerate() // index in `sorted_snippet_candidates` + // First char must match + .filter(|(_ix, prefix)| { + itertools::equal( + prefix + .chars() + .next() + .into_iter() + .flat_map(|c| c.to_lowercase()), + buffer_window + .chars() + .next() + .into_iter() + .flat_map(|c| c.to_lowercase()), + ) }) - }); + .map(|(ix, prefix)| StringMatchCandidate::new(ix, prefix)) + .collect::>(); + + matches.extend( + fuzzy::match_strings( + &candidates, + &buffer_window, + buffer_window.chars().any(|c| c.is_uppercase()), + true, + MAX_RESULTS - matches.len(), // always prioritize longer snippets + &Default::default(), + executor.clone(), + ) + .await + .into_iter() + .map(|string_match| (string_match, buffer_window.len())), + ); + + if matches.len() >= MAX_RESULTS { + break; + } } - let matched_strings = matches - .into_iter() - .map(|m| m.string) - .collect::>(); + let to_lsp = |point: &text::Anchor| { + let end = text::ToPointUtf16::to_point_utf16(point, &snapshot); + point_to_lsp(end) + }; + let lsp_end = to_lsp(&buffer_anchor); - completions.extend(snippets.iter().filter_map(|snippet| { - let matching_prefix = snippet - .prefix - .iter() - .find(|prefix| matched_strings.contains(*prefix))?; - let start = as_offset - last_word.len(); + if matches.len() >= MAX_RESULTS { + is_incomplete = true; + } + + completions.extend(matches.iter().map(|(string_match, buffer_window_len)| { + let ((snippet_index, prefix_index), matching_prefix, _snippet_word_count) = + sorted_snippet_candidates[string_match.candidate_id]; + let snippet = &snippets[snippet_index]; + let start = buffer_offset - buffer_window_len; let start = snapshot.anchor_before(start); - let range = start..buffer_position; + let range = start..buffer_anchor; let lsp_start = to_lsp(&start); let lsp_range = lsp::Range { start: lsp_start, end: lsp_end, }; - Some(Completion { + Completion { replace_range: range, new_text: snippet.body.clone(), source: CompletionSource::Lsp { @@ -23346,7 +24385,11 @@ fn snippet_completions( }), lsp_defaults: None, }, - label: CodeLabel::plain(matching_prefix.clone(), None), + label: CodeLabel { + text: matching_prefix.clone(), + runs: Vec::new(), + filter_range: 0..matching_prefix.len(), + }, icon_path: None, documentation: Some(CompletionDocumentation::SingleLineAndMultiLinePlainText { single_line: snippet.name.clone().into(), @@ -23357,8 +24400,10 @@ fn snippet_completions( }), insert_text_mode: None, confirm: None, - }) - })) + match_start: Some(start), + 
snippet_deduplication_key: Some((snippet_index, prefix_index)), + } + })); } Ok(CompletionResponse { @@ -23426,7 +24471,6 @@ impl CompletionProvider for Entity { position: language::Anchor, text: &str, trigger_in_words: bool, - menu_is_open: bool, cx: &mut Context, ) -> bool { let mut chars = text.chars(); @@ -23441,9 +24485,6 @@ impl CompletionProvider for Entity { let buffer = buffer.read(cx); let snapshot = buffer.snapshot(); - if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input { - return false; - } let classifier = snapshot .char_classifier_at(position) .scope_context(Some(CharScopeContext::Completion)); @@ -23750,13 +24791,15 @@ impl EditorSnapshot { end_row.0 += 1; } let is_created_file = hunk.is_created_file(); + DisplayDiffHunk::Unfolded { status: hunk.status(), - diff_base_byte_range: hunk.diff_base_byte_range, + diff_base_byte_range: hunk.diff_base_byte_range.start.0 + ..hunk.diff_base_byte_range.end.0, + word_diffs: hunk.word_diffs, display_row_range: hunk_display_start.row()..end_row, multi_buffer_range: Anchor::range_in_buffer( hunk.excerpt_id, - hunk.buffer_id, hunk.buffer_range, ), is_created_file, @@ -23787,94 +24830,98 @@ impl EditorSnapshot { self.scroll_anchor.scroll_position(&self.display_snapshot) } - fn gutter_dimensions( + pub fn gutter_dimensions( &self, font_id: FontId, font_size: Pixels, - max_line_number_width: Pixels, + style: &EditorStyle, + window: &mut Window, cx: &App, - ) -> Option { - if !self.show_gutter { - return None; - } - - let ch_width = cx.text_system().ch_width(font_id, font_size).log_err()?; - let ch_advance = cx.text_system().ch_advance(font_id, font_size).log_err()?; + ) -> GutterDimensions { + if self.show_gutter + && let Some(ch_width) = cx.text_system().ch_width(font_id, font_size).log_err() + && let Some(ch_advance) = cx.text_system().ch_advance(font_id, font_size).log_err() + { + let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { + matches!( + ProjectSettings::get_global(cx).git.git_gutter, + GitGutterSetting::TrackedFiles + ) + }); + let gutter_settings = EditorSettings::get_global(cx).gutter; + let show_line_numbers = self + .show_line_numbers + .unwrap_or(gutter_settings.line_numbers); + let line_gutter_width = if show_line_numbers { + // Avoid flicker-like gutter resizes when the line number gains another digit by + // only resizing the gutter on files with > 10**min_line_number_digits lines. + let min_width_for_number_on_gutter = + ch_advance * gutter_settings.min_line_number_digits as f32; + self.max_line_number_width(style, window) + .max(min_width_for_number_on_gutter) + } else { + 0.0.into() + }; - let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { - matches!( - ProjectSettings::get_global(cx).git.git_gutter, - GitGutterSetting::TrackedFiles - ) - }); - let gutter_settings = EditorSettings::get_global(cx).gutter; - let show_line_numbers = self - .show_line_numbers - .unwrap_or(gutter_settings.line_numbers); - let line_gutter_width = if show_line_numbers { - // Avoid flicker-like gutter resizes when the line number gains another digit by - // only resizing the gutter on files with > 10**min_line_number_digits lines. 
- let min_width_for_number_on_gutter = - ch_advance * gutter_settings.min_line_number_digits as f32; - max_line_number_width.max(min_width_for_number_on_gutter) - } else { - 0.0.into() - }; + let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); + let show_breakpoints = self.show_breakpoints.unwrap_or(gutter_settings.breakpoints); - let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); - let show_breakpoints = self.show_breakpoints.unwrap_or(gutter_settings.breakpoints); + let git_blame_entries_width = + self.git_blame_gutter_max_author_length + .map(|max_author_length| { + let renderer = cx.global::().0.clone(); + const MAX_RELATIVE_TIMESTAMP: &str = "60 minutes ago"; - let git_blame_entries_width = - self.git_blame_gutter_max_author_length - .map(|max_author_length| { - let renderer = cx.global::().0.clone(); - const MAX_RELATIVE_TIMESTAMP: &str = "60 minutes ago"; + /// The number of characters to dedicate to gaps and margins. + const SPACING_WIDTH: usize = 4; - /// The number of characters to dedicate to gaps and margins. - const SPACING_WIDTH: usize = 4; + let max_char_count = max_author_length.min(renderer.max_author_length()) + + ::git::SHORT_SHA_LENGTH + + MAX_RELATIVE_TIMESTAMP.len() + + SPACING_WIDTH; - let max_char_count = max_author_length.min(renderer.max_author_length()) - + ::git::SHORT_SHA_LENGTH - + MAX_RELATIVE_TIMESTAMP.len() - + SPACING_WIDTH; + ch_advance * max_char_count + }); - ch_advance * max_char_count - }); + let is_singleton = self.buffer_snapshot().is_singleton(); + + let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); + left_padding += if !is_singleton { + ch_width * 4.0 + } else if show_runnables || show_breakpoints { + ch_width * 3.0 + } else if show_git_gutter && show_line_numbers { + ch_width * 2.0 + } else if show_git_gutter || show_line_numbers { + ch_width + } else { + px(0.) + }; - let is_singleton = self.buffer_snapshot().is_singleton(); - - let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); - left_padding += if !is_singleton { - ch_width * 4.0 - } else if show_runnables || show_breakpoints { - ch_width * 3.0 - } else if show_git_gutter && show_line_numbers { - ch_width * 2.0 - } else if show_git_gutter || show_line_numbers { - ch_width - } else { - px(0.) - }; + let shows_folds = is_singleton && gutter_settings.folds; - let shows_folds = is_singleton && gutter_settings.folds; + let right_padding = if shows_folds && show_line_numbers { + ch_width * 4.0 + } else if shows_folds || (!is_singleton && show_line_numbers) { + ch_width * 3.0 + } else if show_line_numbers { + ch_width + } else { + px(0.) + }; - let right_padding = if shows_folds && show_line_numbers { - ch_width * 4.0 - } else if shows_folds || (!is_singleton && show_line_numbers) { - ch_width * 3.0 - } else if show_line_numbers { - ch_width + GutterDimensions { + left_padding, + right_padding, + width: line_gutter_width + left_padding + right_padding, + margin: GutterDimensions::default_gutter_margin(font_id, font_size, cx), + git_blame_entries_width, + } + } else if self.offset_content { + GutterDimensions::default_with_margin(font_id, font_size, cx) } else { - px(0.) 
- }; - - Some(GutterDimensions { - left_padding, - right_padding, - width: line_gutter_width + left_padding + right_padding, - margin: GutterDimensions::default_gutter_margin(font_id, font_size, cx), - git_blame_entries_width, - }) + GutterDimensions::default() + } } pub fn render_crease_toggle( @@ -23957,6 +25004,28 @@ impl EditorSnapshot { None } } + + pub fn max_line_number_width(&self, style: &EditorStyle, window: &mut Window) -> Pixels { + let digit_count = self.widest_line_number().ilog10() + 1; + column_pixels(style, digit_count as usize, window) + } +} + +pub fn column_pixels(style: &EditorStyle, column: usize, window: &Window) -> Pixels { + let font_size = style.text.font_size.to_pixels(window.rem_size()); + let layout = window.text_system().shape_line( + SharedString::from(" ".repeat(column)), + font_size, + &[TextRun { + len: column, + font: style.text.font(), + color: Hsla::default(), + ..Default::default() + }], + None, + ); + + layout.width } impl Deref for EditorSnapshot { @@ -24037,57 +25106,7 @@ impl Focusable for Editor { impl Render for Editor { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - - let mut text_style = match self.mode { - EditorMode::SingleLine | EditorMode::AutoHeight { .. } => TextStyle { - color: cx.theme().colors().editor_foreground, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), - font_weight: settings.ui_font.weight, - line_height: relative(settings.buffer_line_height.value()), - ..Default::default() - }, - EditorMode::Full { .. } | EditorMode::Minimap { .. } => TextStyle { - color: cx.theme().colors().editor_foreground, - font_family: settings.buffer_font.family.clone(), - font_features: settings.buffer_font.features.clone(), - font_fallbacks: settings.buffer_font.fallbacks.clone(), - font_size: settings.buffer_font_size(cx).into(), - font_weight: settings.buffer_font.weight, - line_height: relative(settings.buffer_line_height.value()), - ..Default::default() - }, - }; - if let Some(text_style_refinement) = &self.text_style_refinement { - text_style.refine(text_style_refinement) - } - - let background = match self.mode { - EditorMode::SingleLine => cx.theme().system().transparent, - EditorMode::AutoHeight { .. } => cx.theme().system().transparent, - EditorMode::Full { .. } => cx.theme().colors().editor_background, - EditorMode::Minimap { .. 
} => cx.theme().colors().editor_background.opacity(0.7), - }; - - EditorElement::new( - &cx.entity(), - EditorStyle { - background, - border: cx.theme().colors().border, - local_player: cx.theme().players().local(), - text: text_style, - scrollbar_width: EditorElement::SCROLLBAR_WIDTH, - syntax: cx.theme().syntax().clone(), - status: cx.theme().status().clone(), - inlay_hints_style: make_inlay_hints_style(cx), - edit_prediction_styles: make_suggestion_styles(cx), - unnecessary_code_fade: ThemeSettings::get_global(cx).unnecessary_code_fade, - show_underlines: self.diagnostics_enabled(), - }, - ) + EditorElement::new(&cx.entity(), self.create_style(cx)) } } @@ -24100,10 +25119,16 @@ impl EntityInputHandler for Editor { cx: &mut Context, ) -> Option { let snapshot = self.buffer.read(cx).read(cx); - let start = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.start), Bias::Left); - let end = snapshot.clip_offset_utf16(OffsetUtf16(range_utf16.end), Bias::Right); - if (start.0..end.0) != range_utf16 { - adjusted_range.replace(start.0..end.0); + let start = snapshot.clip_offset_utf16( + MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.start)), + Bias::Left, + ); + let end = snapshot.clip_offset_utf16( + MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.end)), + Bias::Right, + ); + if (start.0.0..end.0.0) != range_utf16 { + adjusted_range.replace(start.0.0..end.0.0); } Some(snapshot.text_for_range(start..end).collect()) } @@ -24122,11 +25147,11 @@ impl EntityInputHandler for Editor { let selection = self .selections - .newest::(&self.display_snapshot(cx)); + .newest::(&self.display_snapshot(cx)); let range = selection.range(); Some(UTF16Selection { - range: range.start.0..range.end.0, + range: range.start.0.0..range.end.0.0, reversed: selection.reversed, }) } @@ -24134,7 +25159,7 @@ impl EntityInputHandler for Editor { fn marked_text_range(&self, _: &mut Window, cx: &mut Context) -> Option> { let snapshot = self.buffer.read(cx).read(cx); let range = self.text_highlights::(cx)?.1.first()?; - Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0) + Some(range.start.to_offset_utf16(&snapshot).0.0..range.end.to_offset_utf16(&snapshot).0.0) } fn unmark_text(&mut self, _: &mut Window, cx: &mut Context) { @@ -24156,7 +25181,8 @@ impl EntityInputHandler for Editor { self.transact(window, cx, |this, window, cx| { let new_selected_ranges = if let Some(range_utf16) = range_utf16 { - let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + let range_utf16 = MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.start)) + ..MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.end)); Some(this.selection_replacement_ranges(range_utf16, cx)) } else { this.marked_text_ranges(cx) @@ -24165,14 +25191,14 @@ impl EntityInputHandler for Editor { let range_to_replace = new_selected_ranges.as_ref().and_then(|ranges_to_replace| { let newest_selection_id = this.selections.newest_anchor().id; this.selections - .all::(&this.display_snapshot(cx)) + .all::(&this.display_snapshot(cx)) .iter() .zip(ranges_to_replace.iter()) .find_map(|(selection, range)| { if selection.id == newest_selection_id { Some( - (range.start.0 as isize - selection.head().0 as isize) - ..(range.end.0 as isize - selection.head().0 as isize), + (range.start.0.0 as isize - selection.head().0.0 as isize) + ..(range.end.0.0 as isize - selection.head().0.0 as isize), ) } else { None @@ -24221,8 +25247,8 @@ impl EntityInputHandler for Editor { let snapshot = this.buffer.read(cx).read(cx); if let Some(relative_range_utf16) 
= range_utf16.as_ref() { for marked_range in &mut marked_ranges { - marked_range.end.0 = marked_range.start.0 + relative_range_utf16.end; - marked_range.start.0 += relative_range_utf16.start; + marked_range.end = marked_range.start + relative_range_utf16.end; + marked_range.start += relative_range_utf16.start; marked_range.start = snapshot.clip_offset_utf16(marked_range.start, Bias::Left); marked_range.end = @@ -24231,7 +25257,8 @@ impl EntityInputHandler for Editor { } Some(marked_ranges) } else if let Some(range_utf16) = range_utf16 { - let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end); + let range_utf16 = MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.start)) + ..MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.end)); Some(this.selection_replacement_ranges(range_utf16, cx)) } else { None @@ -24240,14 +25267,14 @@ impl EntityInputHandler for Editor { let range_to_replace = ranges_to_replace.as_ref().and_then(|ranges_to_replace| { let newest_selection_id = this.selections.newest_anchor().id; this.selections - .all::(&this.display_snapshot(cx)) + .all::(&this.display_snapshot(cx)) .iter() .zip(ranges_to_replace.iter()) .find_map(|(selection, range)| { if selection.id == newest_selection_id { Some( - (range.start.0 as isize - selection.head().0 as isize) - ..(range.end.0 as isize - selection.head().0 as isize), + (range.start.0.0 as isize - selection.head().0.0 as isize) + ..(range.end.0.0 as isize - selection.head().0.0 as isize), ) } else { None @@ -24309,8 +25336,12 @@ impl EntityInputHandler for Editor { .into_iter() .map(|marked_range| { let insertion_start = marked_range.start.to_offset_utf16(&snapshot).0; - let new_start = OffsetUtf16(new_selected_range.start + insertion_start); - let new_end = OffsetUtf16(new_selected_range.end + insertion_start); + let new_start = MultiBufferOffsetUtf16(OffsetUtf16( + insertion_start.0 + new_selected_range.start, + )); + let new_end = MultiBufferOffsetUtf16(OffsetUtf16( + insertion_start.0 + new_selected_range.end, + )); snapshot.clip_offset_utf16(new_start, Bias::Left) ..snapshot.clip_offset_utf16(new_end, Bias::Right) }) @@ -24353,7 +25384,8 @@ impl EntityInputHandler for Editor { let scroll_position = snapshot.scroll_position(); let scroll_left = scroll_position.x * ScrollOffset::from(em_advance); - let start = OffsetUtf16(range_utf16.start).to_display_point(&snapshot); + let start = + MultiBufferOffsetUtf16(OffsetUtf16(range_utf16.start)).to_display_point(&snapshot); let x = Pixels::from( ScrollOffset::from( snapshot.x_for_display_point(start, &text_layout_details) @@ -24383,7 +25415,7 @@ impl EntityInputHandler for Editor { .snapshot .display_point_to_anchor(display_point, Bias::Left); let utf16_offset = anchor.to_offset_utf16(&position_map.snapshot.buffer_snapshot()); - Some(utf16_offset.0) + Some(utf16_offset.0.0) } fn accepts_text_input(&self, _window: &mut Window, _cx: &mut Context) -> bool { @@ -24543,6 +25575,7 @@ pub fn diagnostic_style(severity: lsp::DiagnosticSeverity, colors: &StatusColors pub fn styled_runs_for_code_label<'a>( label: &'a CodeLabel, syntax_theme: &'a theme::SyntaxTheme, + local_player: &'a theme::PlayerColor, ) -> impl 'a + Iterator, HighlightStyle)> { let fade_out = HighlightStyle { fade_out: Some(0.35), @@ -24555,7 +25588,17 @@ pub fn styled_runs_for_code_label<'a>( .iter() .enumerate() .flat_map(move |(ix, (range, highlight_id))| { - let style = if let Some(style) = highlight_id.style(syntax_theme) { + let style = if *highlight_id == language::HighlightId::TABSTOP_INSERT_ID { + 
HighlightStyle { + color: Some(local_player.cursor), + ..Default::default() + } + } else if *highlight_id == language::HighlightId::TABSTOP_REPLACE_ID { + HighlightStyle { + background_color: Some(local_player.selection), + ..Default::default() + } + } else if let Some(style) = highlight_id.style(syntax_theme) { style } else { return Default::default(); @@ -24604,6 +25647,33 @@ pub(crate) fn split_words(text: &str) -> impl std::iter::Iterator + }) } +/// Given a string of text immediately before the cursor, iterates over possible +/// strings a snippet could match to. More precisely: returns an iterator over +/// suffixes of `text` created by splitting at word boundaries (before & after +/// every non-word character). +/// +/// Shorter suffixes are returned first. +pub(crate) fn snippet_candidate_suffixes( + text: &str, + is_word_char: impl Fn(char) -> bool, +) -> impl std::iter::Iterator { + let mut prev_index = text.len(); + let mut prev_codepoint = None; + text.char_indices() + .rev() + .chain([(0, '\0')]) + .filter_map(move |(index, codepoint)| { + let prev_index = std::mem::replace(&mut prev_index, index); + let prev_codepoint = prev_codepoint.replace(codepoint)?; + if is_word_char(prev_codepoint) && is_word_char(codepoint) { + None + } else { + let chunk = &text[prev_index..]; // go to end of string + Some(chunk) + } + }) +} + pub trait RangeToAnchorExt: Sized { fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range; @@ -24839,7 +25909,7 @@ impl BreakpointPromptEditor { self.editor .update(cx, |editor, cx| { editor.remove_blocks(self.block_ids.clone(), None, cx); - window.focus(&editor.focus_handle); + window.focus(&editor.focus_handle, cx); }) .log_err(); } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 4510e61b74c9bd9ca8ace634f7554f63c4981dd7..48e59f7b7420473054214572a2908215f98ffded 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2,7 +2,7 @@ use super::*; use crate::{ JoinLines, code_context_menus::CodeContextMenu, - edit_prediction_tests::FakeEditPredictionProvider, + edit_prediction_tests::FakeEditPredictionDelegate, element::StickyHeader, linked_editing_ranges::LinkedEditingRanges, scroll::scroll_amount::ScrollAmount, @@ -17,8 +17,8 @@ use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkS use collections::HashMap; use futures::{StreamExt, channel::oneshot}; use gpui::{ - BackgroundExecutor, DismissEvent, Rgba, SemanticVersion, TestAppContext, UpdateGlobal, - VisualTestContext, WindowBounds, WindowOptions, div, + BackgroundExecutor, DismissEvent, Rgba, TestAppContext, UpdateGlobal, VisualTestContext, + WindowBounds, WindowOptions, div, }; use indoc::indoc; use language::{ @@ -32,20 +32,25 @@ use language::{ tree_sitter_python, }; use language_settings::Formatter; +use languages::markdown_lang; use languages::rust_lang; use lsp::CompletionParams; -use multi_buffer::{IndentGuide, PathKey}; +use multi_buffer::{ + IndentGuide, MultiBufferFilterMode, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, +}; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_ne}; use project::{ - FakeFs, + FakeFs, Project, debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}, project_settings::LspSettings, + trusted_worktrees::{PathTrust, TrustedWorktrees}, }; use serde_json::{self, json}; use settings::{ AllLanguageSettingsContent, EditorSettingsContent, IndentGuideBackgroundColoring, - IndentGuideColoring, ProjectSettingsContent, SearchSettingsContent, + 
IndentGuideColoring, InlayHintSettingsContent, ProjectSettingsContent, SearchSettingsContent, + SettingsStore, }; use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant}; use std::{ @@ -64,7 +69,6 @@ use util::{ use workspace::{ CloseActiveItem, CloseAllItems, CloseOtherItems, MoveItemToPaneInDirection, NavigationEntry, OpenOptions, ViewId, - invalid_item_view::InvalidItemView, item::{FollowEvent, FollowableItem, Item, ItemHandle, SaveOptions}, register_project_item, }; @@ -196,7 +200,7 @@ fn test_edit_events(cx: &mut TestAppContext) { // No event is emitted when the mutation is a no-op. _ = editor2.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([0..0]) + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) }); editor.backspace(&Backspace, window, cx); @@ -221,7 +225,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { editor.start_transaction_at(now, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([2..4]) + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(4)]) }); editor.insert("cd", window, cx); @@ -229,38 +233,46 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "12cd56"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![4..4] + vec![MultiBufferOffset(4)..MultiBufferOffset(4)] ); editor.start_transaction_at(now, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([4..5]) + s.select_ranges([MultiBufferOffset(4)..MultiBufferOffset(5)]) }); editor.insert("e", window, cx); editor.end_transaction_at(now, cx); assert_eq!(editor.text(cx), "12cde6"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![5..5] + vec![MultiBufferOffset(5)..MultiBufferOffset(5)] ); now += group_interval + Duration::from_millis(1); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([2..2]) + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(2)]) }); // Simulate an edit in another editor buffer.update(cx, |buffer, cx| { buffer.start_transaction_at(now, cx); - buffer.edit([(0..1, "a")], None, cx); - buffer.edit([(1..1, "b")], None, cx); + buffer.edit( + [(MultiBufferOffset(0)..MultiBufferOffset(1), "a")], + None, + cx, + ); + buffer.edit( + [(MultiBufferOffset(1)..MultiBufferOffset(1), "b")], + None, + cx, + ); buffer.end_transaction_at(now, cx); }); assert_eq!(editor.text(cx), "ab2cde6"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![3..3] + vec![MultiBufferOffset(3)..MultiBufferOffset(3)] ); // Last transaction happened past the group interval in a different editor. @@ -269,7 +281,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "12cde6"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![2..2] + vec![MultiBufferOffset(2)..MultiBufferOffset(2)] ); // First two transactions happened within the group interval in this editor. @@ -279,7 +291,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "123456"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![0..0] + vec![MultiBufferOffset(0)..MultiBufferOffset(0)] ); // Redo the first two transactions together. 
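// Illustrative sketch, not Zed's definition: the newtype-offset pattern these test updates
// reflect. `MultiBufferOffset` below is a stand-in struct; the point is that wrapping the
// raw usize in a distinct type keeps plain integers from being passed where a multi-buffer
// offset is expected, which is why the selections above now spell out MultiBufferOffset(n).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct MultiBufferOffset(usize);

fn select(range: std::ops::Range<MultiBufferOffset>) -> (usize, usize) {
    (range.start.0, range.end.0)
}

fn main() {
    assert_eq!(select(MultiBufferOffset(2)..MultiBufferOffset(4)), (2, 4));
    // select(2..4); // would not compile: a bare usize is not a MultiBufferOffset
}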
@@ -287,7 +299,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "12cde6"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![5..5] + vec![MultiBufferOffset(5)..MultiBufferOffset(5)] ); // Redo the last transaction on its own. @@ -295,7 +307,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "ab2cde6"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - vec![6..6] + vec![MultiBufferOffset(6)..MultiBufferOffset(6)] ); // Test empty transactions. @@ -328,7 +340,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "äbcde"); assert_eq!( editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + Some(vec![ + MultiBufferOffsetUtf16(OffsetUtf16(0))..MultiBufferOffsetUtf16(OffsetUtf16(1)) + ]) ); // Finalize IME composition. @@ -348,7 +362,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { editor.replace_and_mark_text_in_range(Some(0..1), "à", None, window, cx); assert_eq!( editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(0)..OffsetUtf16(1)]) + Some(vec![ + MultiBufferOffsetUtf16(OffsetUtf16(0))..MultiBufferOffsetUtf16(OffsetUtf16(1)) + ]) ); // Undoing during an IME composition cancels it. @@ -361,7 +377,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { assert_eq!(editor.text(cx), "ābcdè"); assert_eq!( editor.marked_text_ranges(cx), - Some(vec![OffsetUtf16(4)..OffsetUtf16(5)]) + Some(vec![ + MultiBufferOffsetUtf16(OffsetUtf16(4))..MultiBufferOffsetUtf16(OffsetUtf16(5)) + ]) ); // Finalize IME composition with an invalid replacement range, ensuring it gets clipped. @@ -372,9 +390,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { // Start a new IME composition with multiple cursors. 
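// Illustrative sketch, not part of this patch: why the IME assertions above measure marked
// ranges in UTF-16 code units rather than bytes. "ä" is two UTF-8 bytes but one UTF-16
// unit, so its marked range is OffsetUtf16(0)..OffsetUtf16(1); emoji take two units.
fn utf16_len(text: &str) -> usize {
    text.encode_utf16().count()
}

fn main() {
    assert_eq!("ä".len(), 2); // UTF-8 bytes
    assert_eq!(utf16_len("ä"), 1); // UTF-16 code units
    assert_eq!("🍐".len(), 4); // UTF-8 bytes
    assert_eq!(utf16_len("🍐"), 2); // surrogate pair
}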
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([ - OffsetUtf16(1)..OffsetUtf16(1), - OffsetUtf16(3)..OffsetUtf16(3), - OffsetUtf16(5)..OffsetUtf16(5), + MultiBufferOffsetUtf16(OffsetUtf16(1))..MultiBufferOffsetUtf16(OffsetUtf16(1)), + MultiBufferOffsetUtf16(OffsetUtf16(3))..MultiBufferOffsetUtf16(OffsetUtf16(3)), + MultiBufferOffsetUtf16(OffsetUtf16(5))..MultiBufferOffsetUtf16(OffsetUtf16(5)), ]) }); editor.replace_and_mark_text_in_range(Some(4..5), "XYZ", None, window, cx); @@ -382,9 +400,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { assert_eq!( editor.marked_text_ranges(cx), Some(vec![ - OffsetUtf16(0)..OffsetUtf16(3), - OffsetUtf16(4)..OffsetUtf16(7), - OffsetUtf16(8)..OffsetUtf16(11) + MultiBufferOffsetUtf16(OffsetUtf16(0))..MultiBufferOffsetUtf16(OffsetUtf16(3)), + MultiBufferOffsetUtf16(OffsetUtf16(4))..MultiBufferOffsetUtf16(OffsetUtf16(7)), + MultiBufferOffsetUtf16(OffsetUtf16(8))..MultiBufferOffsetUtf16(OffsetUtf16(11)) ]) ); @@ -394,9 +412,9 @@ fn test_ime_composition(cx: &mut TestAppContext) { assert_eq!( editor.marked_text_ranges(cx), Some(vec![ - OffsetUtf16(1)..OffsetUtf16(2), - OffsetUtf16(5)..OffsetUtf16(6), - OffsetUtf16(9)..OffsetUtf16(10) + MultiBufferOffsetUtf16(OffsetUtf16(1))..MultiBufferOffsetUtf16(OffsetUtf16(2)), + MultiBufferOffsetUtf16(OffsetUtf16(5))..MultiBufferOffsetUtf16(OffsetUtf16(6)), + MultiBufferOffsetUtf16(OffsetUtf16(9))..MultiBufferOffsetUtf16(OffsetUtf16(10)) ]) ); @@ -756,7 +774,11 @@ fn test_clone(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(selection_ranges.clone()) + s.select_ranges( + selection_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }); editor.fold_creases( vec![ @@ -793,9 +815,11 @@ fn test_clone(cx: &mut TestAppContext) { ); assert_eq!( cloned_snapshot - .folds_in_range(0..text.len()) + .folds_in_range(MultiBufferOffset(0)..MultiBufferOffset(text.len())) + .collect::>(), + snapshot + .folds_in_range(MultiBufferOffset(0)..MultiBufferOffset(text.len())) .collect::>(), - snapshot.folds_in_range(0..text.len()).collect::>(), ); assert_set_eq!( cloned_editor @@ -1417,7 +1441,11 @@ fn test_fold_at_level(cx: &mut TestAppContext) { ); editor.change_selections(SelectionEffects::default(), window, cx, |s| { - s.select_ranges(positions) + s.select_ranges( + positions + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }); editor.fold_at_level(&FoldAtLevel(2), window, cx); @@ -2191,10 +2219,9 @@ async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut TestAppContext) init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let line_height = cx.editor(|editor, window, _| { + let line_height = cx.update_editor(|editor, window, cx| { editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()) }); @@ -2307,10 +2334,9 @@ async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut TestAppContext) async fn test_scroll_page_up_page_down(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let line_height = cx.editor(|editor, window, _| { + let line_height = cx.update_editor(|editor, window, cx| { editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()) }); @@ -2373,8 +2399,7 @@ async fn test_autoscroll(cx: &mut TestAppContext) { let line_height = 
cx.update_editor(|editor, window, cx| { editor.set_vertical_scroll_margin(2, cx); editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()) }); @@ -2453,10 +2478,9 @@ async fn test_move_page_up_page_down(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; - let line_height = cx.editor(|editor, window, _cx| { + let line_height = cx.update_editor(|editor, window, cx| { editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()) }); @@ -3699,7 +3723,11 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) { let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx); let mut editor = build_editor(buffer, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([3..4, 11..12, 19..20]) + s.select_ranges([ + MultiBufferOffset(3)..MultiBufferOffset(4), + MultiBufferOffset(11)..MultiBufferOffset(12), + MultiBufferOffset(19)..MultiBufferOffset(20), + ]) }); editor }); @@ -3707,12 +3735,24 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { // Edit the buffer directly, deleting ranges surrounding the editor's selections editor.buffer.update(cx, |buffer, cx| { - buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx); + buffer.edit( + [ + (MultiBufferOffset(2)..MultiBufferOffset(5), ""), + (MultiBufferOffset(10)..MultiBufferOffset(13), ""), + (MultiBufferOffset(18)..MultiBufferOffset(21), ""), + ], + None, + cx, + ); assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent()); }); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - &[2..2, 7..7, 12..12], + &[ + MultiBufferOffset(2)..MultiBufferOffset(2), + MultiBufferOffset(7)..MultiBufferOffset(7), + MultiBufferOffset(12)..MultiBufferOffset(12) + ], ); editor.insert("Z", window, cx); @@ -3721,7 +3761,11 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) { // The selections are moved after the inserted characters assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - &[3..3, 9..9, 15..15], + &[ + MultiBufferOffset(3)..MultiBufferOffset(3), + MultiBufferOffset(9)..MultiBufferOffset(9), + MultiBufferOffset(15)..MultiBufferOffset(15) + ], ); }); } @@ -4691,7 +4735,7 @@ async fn test_custom_newlines_cause_no_false_positive_diffs( assert_eq!( snapshot .buffer_snapshot() - .diff_hunks_in_range(0..snapshot.buffer_snapshot().len()) + .diff_hunks_in_range(MultiBufferOffset(0)..snapshot.buffer_snapshot().len()) .collect::>(), Vec::new(), "Should not have any diffs for files with custom newlines" @@ -5730,6 +5774,116 @@ fn test_duplicate_line(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_rotate_selections(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + // Rotate text selections (horizontal) + cx.set_state("x=«1ˇ», y=«2ˇ», z=«3ˇ»"); + cx.update_editor(|e, window, cx| { + e.rotate_selections_forward(&RotateSelectionsForward, window, cx) + }); + cx.assert_editor_state("x=«3ˇ», y=«1ˇ», z=«2ˇ»"); + cx.update_editor(|e, window, cx| { + e.rotate_selections_backward(&RotateSelectionsBackward, window, cx) + }); + cx.assert_editor_state("x=«1ˇ», y=«2ˇ», z=«3ˇ»"); + + // Rotate text selections (vertical) + cx.set_state(indoc! {" + x=«1ˇ» + y=«2ˇ» + z=«3ˇ» + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_forward(&RotateSelectionsForward, window, cx) + }); + cx.assert_editor_state(indoc! 
{" + x=«3ˇ» + y=«1ˇ» + z=«2ˇ» + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_backward(&RotateSelectionsBackward, window, cx) + }); + cx.assert_editor_state(indoc! {" + x=«1ˇ» + y=«2ˇ» + z=«3ˇ» + "}); + + // Rotate text selections (vertical, different lengths) + cx.set_state(indoc! {" + x=\"«ˇ»\" + y=\"«aˇ»\" + z=\"«aaˇ»\" + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_forward(&RotateSelectionsForward, window, cx) + }); + cx.assert_editor_state(indoc! {" + x=\"«aaˇ»\" + y=\"«ˇ»\" + z=\"«aˇ»\" + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_backward(&RotateSelectionsBackward, window, cx) + }); + cx.assert_editor_state(indoc! {" + x=\"«ˇ»\" + y=\"«aˇ»\" + z=\"«aaˇ»\" + "}); + + // Rotate whole lines (cursor positions preserved) + cx.set_state(indoc! {" + ˇline123 + liˇne23 + line3ˇ + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_forward(&RotateSelectionsForward, window, cx) + }); + cx.assert_editor_state(indoc! {" + line3ˇ + ˇline123 + liˇne23 + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_backward(&RotateSelectionsBackward, window, cx) + }); + cx.assert_editor_state(indoc! {" + ˇline123 + liˇne23 + line3ˇ + "}); + + // Rotate whole lines, multiple cursors per line (positions preserved) + cx.set_state(indoc! {" + ˇliˇne123 + ˇline23 + ˇline3 + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_forward(&RotateSelectionsForward, window, cx) + }); + cx.assert_editor_state(indoc! {" + ˇline3 + ˇliˇne123 + ˇline23 + "}); + cx.update_editor(|e, window, cx| { + e.rotate_selections_backward(&RotateSelectionsBackward, window, cx) + }); + cx.assert_editor_state(indoc! {" + ˇliˇne123 + ˇline23 + ˇline3 + "}); +} + #[gpui::test] fn test_move_line_up_down(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -5963,27 +6117,27 @@ fn test_transpose(cx: &mut TestAppContext) { let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), window, cx); editor.set_style(EditorStyle::default(), window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1]) + s.select_ranges([MultiBufferOffset(1)..MultiBufferOffset(1)]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bac"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [2..2] + [MultiBufferOffset(2)..MultiBufferOffset(2)] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bca"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [3..3] + [MultiBufferOffset(3)..MultiBufferOffset(3)] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bac"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [3..3] + [MultiBufferOffset(3)..MultiBufferOffset(3)] ); editor @@ -5993,37 +6147,37 @@ fn test_transpose(cx: &mut TestAppContext) { let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), window, cx); editor.set_style(EditorStyle::default(), window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([3..3]) + s.select_ranges([MultiBufferOffset(3)..MultiBufferOffset(3)]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acb\nde"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [3..3] + [MultiBufferOffset(3)..MultiBufferOffset(3)] ); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([4..4]) + 
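// Illustrative sketch, not Zed's implementation: the rotation exercised by
// test_rotate_selections above. Each selection takes the text of the selection before it
// while cursor positions stay put; rotating [1, 2, 3] forward yields [3, 1, 2].
fn rotate_selected_texts_forward(selected_texts: &mut Vec<String>) {
    if selected_texts.len() > 1 {
        selected_texts.rotate_right(1);
    }
}

fn main() {
    let mut texts = vec!["1".to_string(), "2".to_string(), "3".to_string()];
    rotate_selected_texts_forward(&mut texts);
    assert_eq!(texts, ["3", "1", "2"]); // matches x=«3», y=«1», z=«2» in the test
}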
s.select_ranges([MultiBufferOffset(4)..MultiBufferOffset(4)]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbd\ne"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [5..5] + [MultiBufferOffset(5)..MultiBufferOffset(5)] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbde\n"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [6..6] + [MultiBufferOffset(6)..MultiBufferOffset(6)] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbd\ne"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [6..6] + [MultiBufferOffset(6)..MultiBufferOffset(6)] ); editor @@ -6033,41 +6187,62 @@ fn test_transpose(cx: &mut TestAppContext) { let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), window, cx); editor.set_style(EditorStyle::default(), window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1, 2..2, 4..4]) + s.select_ranges([ + MultiBufferOffset(1)..MultiBufferOffset(1), + MultiBufferOffset(2)..MultiBufferOffset(2), + MultiBufferOffset(4)..MultiBufferOffset(4), + ]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bacd\ne"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [2..2, 3..3, 5..5] + [ + MultiBufferOffset(2)..MultiBufferOffset(2), + MultiBufferOffset(3)..MultiBufferOffset(3), + MultiBufferOffset(5)..MultiBufferOffset(5) + ] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcade\n"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [3..3, 4..4, 6..6] + [ + MultiBufferOffset(3)..MultiBufferOffset(3), + MultiBufferOffset(4)..MultiBufferOffset(4), + MultiBufferOffset(6)..MultiBufferOffset(6) + ] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcda\ne"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [4..4, 6..6] + [ + MultiBufferOffset(4)..MultiBufferOffset(4), + MultiBufferOffset(6)..MultiBufferOffset(6) + ] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcade\n"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [4..4, 6..6] + [ + MultiBufferOffset(4)..MultiBufferOffset(4), + MultiBufferOffset(6)..MultiBufferOffset(6) + ] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcaed\n"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [5..5, 6..6] + [ + MultiBufferOffset(5)..MultiBufferOffset(5), + MultiBufferOffset(6)..MultiBufferOffset(6) + ] ); editor @@ -6077,27 +6252,27 @@ fn test_transpose(cx: &mut TestAppContext) { let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), window, cx); editor.set_style(EditorStyle::default(), window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([4..4]) + s.select_ranges([MultiBufferOffset(4)..MultiBufferOffset(4)]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "🏀🍐✋"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [8..8] + [MultiBufferOffset(8)..MultiBufferOffset(8)] ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "🏀✋🍐"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [11..11] + [MultiBufferOffset(11)..MultiBufferOffset(11)] ); editor.transpose(&Default::default(), window, 
cx); assert_eq!(editor.text(cx), "🏀🍐✋"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - [11..11] + [MultiBufferOffset(11)..MultiBufferOffset(11)] ); editor @@ -7576,10 +7751,12 @@ fn test_select_line(cx: &mut TestAppContext) { ]) }); editor.select_line(&SelectLine, window, cx); + // Adjacent line selections should NOT merge (only overlapping ones do) assert_eq!( display_ranges(editor, cx), vec![ - DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(2), 0), + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(1), 0), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(2), 0), DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 0), ] ); @@ -7598,9 +7775,13 @@ fn test_select_line(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { editor.select_line(&SelectLine, window, cx); + // Adjacent but not overlapping, so they stay separate assert_eq!( display_ranges(editor, cx), - vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(5), 5)] + vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(4), 0), + DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 5), + ] ); }); } @@ -8568,7 +8749,7 @@ async fn test_undo_edit_prediction_scrolls_to_edit_pos(cx: &mut TestAppContext) let mut cx = EditorTestContext::new(cx).await; - let provider = cx.new(|_| FakeEditPredictionProvider::default()); + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); cx.update_editor(|editor, window, cx| { editor.set_edit_prediction_provider(Some(provider.clone()), window, cx); }); @@ -8591,7 +8772,7 @@ async fn test_undo_edit_prediction_scrolls_to_edit_pos(cx: &mut TestAppContext) cx.update(|_, cx| { provider.update(cx, |provider, _| { - provider.set_edit_prediction(Some(edit_prediction::EditPrediction::Local { + provider.set_edit_prediction(Some(edit_prediction_types::EditPrediction::Local { id: None, edits: vec![(edit_position..edit_position, "X".into())], edit_preview: None, @@ -9730,7 +9911,11 @@ async fn test_autoindent(cx: &mut TestAppContext) { editor.update_in(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([5..5, 8..8, 9..9]) + s.select_ranges([ + MultiBufferOffset(5)..MultiBufferOffset(5), + MultiBufferOffset(8)..MultiBufferOffset(8), + MultiBufferOffset(9)..MultiBufferOffset(9), + ]) }); editor.newline(&Newline, window, cx); assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); @@ -9795,7 +9980,11 @@ async fn test_autoindent_disabled(cx: &mut TestAppContext) { editor.update_in(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([5..5, 8..8, 9..9]) + s.select_ranges([ + MultiBufferOffset(5)..MultiBufferOffset(5), + MultiBufferOffset(8)..MultiBufferOffset(8), + MultiBufferOffset(9)..MultiBufferOffset(9), + ]) }); editor.newline(&Newline, window, cx); assert_eq!( @@ -9894,7 +10083,7 @@ async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext) ], ..Default::default() }, - name: LanguageName::new("rust"), + name: LanguageName::new_static("rust"), ..Default::default() }, Some(tree_sitter_rust::LANGUAGE.into()), @@ -10452,7 +10641,7 @@ async fn test_autoclose_with_embedded_language(cx: &mut TestAppContext) { let snapshot = editor.snapshot(window, cx); let cursors = editor .selections - .ranges::(&editor.display_snapshot(cx)); + .ranges::(&editor.display_snapshot(cx)); let languages = cursors 
.iter() .map(|c| snapshot.language_at(c.start).unwrap().name()) @@ -10681,6 +10870,115 @@ async fn test_autoclose_with_overrides(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_autoclose_quotes_with_scope_awareness(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = languages::language("python", tree_sitter_python::LANGUAGE.into()); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // Double quote inside single-quoted string + cx.set_state(indoc! {r#" + def main(): + items = ['"', ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("\"", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ['"', "ˇ"] + "#}); + + // Two double quotes inside single-quoted string + cx.set_state(indoc! {r#" + def main(): + items = ['""', ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("\"", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ['""', "ˇ"] + "#}); + + // Single quote inside double-quoted string + cx.set_state(indoc! {r#" + def main(): + items = ["'", ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("'", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ["'", 'ˇ'] + "#}); + + // Two single quotes inside double-quoted string + cx.set_state(indoc! {r#" + def main(): + items = ["''", ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("'", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ["''", 'ˇ'] + "#}); + + // Mixed quotes on same line + cx.set_state(indoc! {r#" + def main(): + items = ['"""', "'''''", ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("\"", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ['"""', "'''''", "ˇ"] + "#}); + cx.update_editor(|editor, window, cx| { + editor.move_right(&MoveRight, window, cx); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(", ", window, cx); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input("'", window, cx); + }); + cx.assert_editor_state(indoc! {r#" + def main(): + items = ['"""', "'''''", "", 'ˇ'] + "#}); +} + +#[gpui::test] +async fn test_autoclose_quotes_with_multibyte_characters(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = languages::language("python", tree_sitter_python::LANGUAGE.into()); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + cx.set_state(indoc! {r#" + def main(): + items = ["🎉", ˇ] + "#}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("\"", window, cx); + }); + cx.assert_editor_state(indoc! 
{r#" + def main(): + items = ["🎉", "ˇ"] + "#}); +} + #[gpui::test] async fn test_surround_with_pair(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -11142,17 +11440,26 @@ async fn test_snippet_placeholder_choices(cx: &mut TestAppContext) { let snippet = Snippet::parse("type ${1|,i32,u32|} = $2").unwrap(); editor - .insert_snippet(&insertion_ranges, snippet, window, cx) + .insert_snippet( + &insertion_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>(), + snippet, + window, + cx, + ) .unwrap(); fn assert(editor: &mut Editor, cx: &mut Context, marked_text: &str) { let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); assert_eq!(editor.text(cx), expected_text); assert_eq!( - editor - .selections - .ranges::(&editor.display_snapshot(cx)), + editor.selections.ranges(&editor.display_snapshot(cx)), selection_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>() ); } @@ -11176,10 +11483,11 @@ async fn test_snippet_tabstop_navigation_with_placeholders(cx: &mut TestAppConte let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); assert_eq!(editor.text(cx), expected_text); assert_eq!( - editor - .selections - .ranges::(&editor.display_snapshot(cx)), + editor.selections.ranges(&editor.display_snapshot(cx)), selection_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>() ); } @@ -11197,7 +11505,15 @@ async fn test_snippet_tabstop_navigation_with_placeholders(cx: &mut TestAppConte let snippet = Snippet::parse("type ${1|,i32,u32|} = $2; $3").unwrap(); editor - .insert_snippet(&insertion_ranges, snippet, window, cx) + .insert_snippet( + &insertion_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>(), + snippet, + window, + cx, + ) .unwrap(); assert_state( @@ -11414,6 +11730,53 @@ async fn test_snippet_indentation(cx: &mut TestAppContext) { ˇ"}); } +#[gpui::test] +async fn test_snippet_with_multi_word_prefix(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_editor(|editor, _, cx| { + editor.project().unwrap().update(cx, |project, cx| { + project.snippets().update(cx, |snippets, _cx| { + let snippet = project::snippet_provider::Snippet { + prefix: vec!["multi word".to_string()], + body: "this is many words".to_string(), + description: Some("description".to_string()), + name: "multi-word snippet test".to_string(), + }; + snippets.add_snippet_for_test( + None, + PathBuf::from("test_snippets.json"), + vec![Arc::new(snippet)], + ); + }); + }) + }); + + for (input_to_simulate, should_match_snippet) in [ + ("m", true), + ("m ", true), + ("m w", true), + ("aa m w", true), + ("aa m g", false), + ] { + cx.set_state("ˇ"); + cx.simulate_input(input_to_simulate); // fails correctly + + cx.update_editor(|editor, _, _| { + let Some(CodeContextMenu::Completions(context_menu)) = &*editor.context_menu.borrow() + else { + assert!(!should_match_snippet); // no completions! 
don't even show the menu + return; + }; + assert!(context_menu.visible()); + let completions = context_menu.completions.borrow(); + + assert_eq!(!completions.is_empty(), should_match_snippet); + }); + } +} + #[gpui::test] async fn test_document_format_during_save(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -11598,7 +11961,7 @@ async fn test_redo_after_noop_format(cx: &mut TestAppContext) { }); editor.update_in(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::default(), window, cx, |s| { - s.select_ranges([0..0]) + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) }); }); assert!(!cx.read(|cx| editor.is_dirty(cx))); @@ -11764,7 +12127,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(1..2)), + |s| s.select_ranges(Some(MultiBufferOffset(1)..MultiBufferOffset(2))), ); editor.insert("|one|two|three|", window, cx); }); @@ -11774,7 +12137,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(60..70)), + |s| s.select_ranges(Some(MultiBufferOffset(60)..MultiBufferOffset(70))), ); editor.insert("|four|five|six|", window, cx); }); @@ -11942,7 +12305,7 @@ async fn test_autosave_with_dirty_buffers(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(10..10)), + |s| s.select_ranges(Some(MultiBufferOffset(10)..MultiBufferOffset(10))), ); editor.insert("// edited", window, cx); }); @@ -13380,7 +13743,7 @@ async fn test_signature_help(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([0..0]) + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) }); }); @@ -14615,53 +14978,227 @@ async fn test_completion(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_completion_reuse(cx: &mut TestAppContext) { +async fn test_completion_can_run_commands(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let mut cx = EditorLspTestContext::new_rust( - lsp::ServerCapabilities { - completion_provider: Some(lsp::CompletionOptions { - trigger_characters: Some(vec![".".to_string()]), - ..Default::default() - }), - ..Default::default() - }, - cx, - ) - .await; - - let counter = Arc::new(AtomicUsize::new(0)); - cx.set_state("objˇ"); - cx.simulate_keystroke("."); - - // Initial completion request returns complete results - let is_incomplete = false; - handle_completion_request( - "obj.|<>", - vec!["a", "ab", "abc"], - is_incomplete, - counter.clone(), - &mut cx, + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "", + }), ) .await; - cx.run_until_parked(); - assert_eq!(counter.load(atomic::Ordering::Acquire), 1); - cx.assert_editor_state("obj.ˇ"); - check_displayed_completions(vec!["a", "ab", "abc"], &mut cx); - // Type "a" - filters existing completions - cx.simulate_keystroke("a"); - cx.run_until_parked(); - assert_eq!(counter.load(atomic::Ordering::Acquire), 1); - cx.assert_editor_state("obj.aˇ"); - check_displayed_completions(vec!["a", "ab", "abc"], &mut cx); - - // Type "b" - filters existing completions - cx.simulate_keystroke("b"); - cx.run_until_parked(); - assert_eq!(counter.load(atomic::Ordering::Acquire), 1); - cx.assert_editor_state("obj.abˇ"); - check_displayed_completions(vec!["ab", "abc"], &mut cx); + let project = Project::test(fs, 
[path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let command_calls = Arc::new(AtomicUsize::new(0)); + let registered_command = "_the/command"; + + let closure_command_calls = command_calls.clone(); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string(), ":".to_string()]), + ..lsp::CompletionOptions::default() + }), + execute_command_provider: Some(lsp::ExecuteCommandOptions { + commands: vec![registered_command.to_owned()], + ..lsp::ExecuteCommandOptions::default() + }), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new(move |fake_server| { + fake_server.set_request_handler::( + move |params, _| async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "registered_command".to_owned(), + text_edit: gen_text_edit(¶ms, ""), + command: Some(lsp::Command { + title: registered_command.to_owned(), + command: "_the/command".to_owned(), + arguments: Some(vec![serde_json::Value::Bool(true)]), + }), + ..lsp::CompletionItem::default() + }, + lsp::CompletionItem { + label: "unregistered_command".to_owned(), + text_edit: gen_text_edit(¶ms, ""), + command: Some(lsp::Command { + title: "????????????".to_owned(), + command: "????????????".to_owned(), + arguments: Some(vec![serde_json::Value::Null]), + }), + ..lsp::CompletionItem::default() + }, + ]))) + }, + ); + fake_server.set_request_handler::({ + let command_calls = closure_command_calls.clone(); + move |params, _| { + assert_eq!(params.command, registered_command); + let command_calls = command_calls.clone(); + async move { + command_calls.fetch_add(1, atomic::Ordering::Release); + Ok(Some(json!(null))) + } + } + }); + })), + ..FakeLspAdapter::default() + }, + ); + let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let editor = workspace + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from(path!("/a/main.rs")), + OpenOptions::default(), + window, + cx, + ) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); + let _fake_server = fake_servers.next().await.unwrap(); + + editor.update_in(cx, |editor, window, cx| { + cx.focus_self(window); + editor.move_to_end(&MoveToEnd, window, cx); + editor.handle_input(".", window, cx); + }); + cx.run_until_parked(); + editor.update(cx, |editor, _| { + assert!(editor.context_menu_visible()); + if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() + { + let completion_labels = menu + .completions + .borrow() + .iter() + .map(|c| c.label.text.clone()) + .collect::>(); + assert_eq!( + completion_labels, + &["registered_command", "unregistered_command",], + ); + } else { + panic!("expected completion menu to be open"); + } + }); + + editor + .update_in(cx, |editor, window, cx| { + editor + .confirm_completion(&ConfirmCompletion::default(), window, cx) + .unwrap() + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + command_calls.load(atomic::Ordering::Acquire), + 1, + "For completion with a registered command, Zed should send a command execution request", + ); + + editor.update_in(cx, |editor, window, cx| { + cx.focus_self(window); + editor.handle_input(".", window, cx); + 
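// Illustrative sketch, not Zed's code: the gating behavior test_completion_can_run_commands
// verifies. A completion's attached LSP command is executed only if the language server
// advertised that command in executeCommandProvider.commands; unknown commands are ignored.
// The HashSet below is a stand-in for the server's registered command list.
use std::collections::HashSet;

fn should_execute_command(registered: &HashSet<&str>, command: &str) -> bool {
    registered.contains(command)
}

fn main() {
    let registered: HashSet<&str> = ["_the/command"].into_iter().collect();
    assert!(should_execute_command(&registered, "_the/command"));
    assert!(!should_execute_command(&registered, "????????????"));
}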
}); + cx.run_until_parked(); + editor.update(cx, |editor, _| { + assert!(editor.context_menu_visible()); + if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() + { + let completion_labels = menu + .completions + .borrow() + .iter() + .map(|c| c.label.text.clone()) + .collect::>(); + assert_eq!( + completion_labels, + &["registered_command", "unregistered_command",], + ); + } else { + panic!("expected completion menu to be open"); + } + }); + editor + .update_in(cx, |editor, window, cx| { + editor.context_menu_next(&Default::default(), window, cx); + editor + .confirm_completion(&ConfirmCompletion::default(), window, cx) + .unwrap() + }) + .await + .unwrap(); + cx.run_until_parked(); + assert_eq!( + command_calls.load(atomic::Ordering::Acquire), + 1, + "For completion with an unregistered command, Zed should not send a command execution request", + ); +} + +#[gpui::test] +async fn test_completion_reuse(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + trigger_characters: Some(vec![".".to_string()]), + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + + let counter = Arc::new(AtomicUsize::new(0)); + cx.set_state("objˇ"); + cx.simulate_keystroke("."); + + // Initial completion request returns complete results + let is_incomplete = false; + handle_completion_request( + "obj.|<>", + vec!["a", "ab", "abc"], + is_incomplete, + counter.clone(), + &mut cx, + ) + .await; + cx.run_until_parked(); + assert_eq!(counter.load(atomic::Ordering::Acquire), 1); + cx.assert_editor_state("obj.ˇ"); + check_displayed_completions(vec!["a", "ab", "abc"], &mut cx); + + // Type "a" - filters existing completions + cx.simulate_keystroke("a"); + cx.run_until_parked(); + assert_eq!(counter.load(atomic::Ordering::Acquire), 1); + cx.assert_editor_state("obj.aˇ"); + check_displayed_completions(vec!["a", "ab", "abc"], &mut cx); + + // Type "b" - filters existing completions + cx.simulate_keystroke("b"); + cx.run_until_parked(); + assert_eq!(counter.load(atomic::Ordering::Acquire), 1); + cx.assert_editor_state("obj.abˇ"); + check_displayed_completions(vec!["ab", "abc"], &mut cx); // Type "c" - filters existing completions cx.simulate_keystroke("c"); @@ -15775,7 +16312,7 @@ async fn test_toggle_comment(cx: &mut TestAppContext) { cx.assert_editor_state(indoc! {" fn a() { «b(); - c(); + ˇ»«c(); ˇ» d(); } "}); @@ -15787,8 +16324,8 @@ async fn test_toggle_comment(cx: &mut TestAppContext) { cx.assert_editor_state(indoc! 
{" fn a() { // «b(); - // c(); - ˇ»// d(); + ˇ»// «c(); + ˇ» // d(); } "}); @@ -15797,7 +16334,7 @@ async fn test_toggle_comment(cx: &mut TestAppContext) { fn a() { // b(); «// c(); - ˇ» // d(); + ˇ» // d(); } "}); @@ -15807,7 +16344,7 @@ async fn test_toggle_comment(cx: &mut TestAppContext) { fn a() { // b(); «c(); - ˇ» // d(); + ˇ» // d(); } "}); @@ -16365,7 +16902,11 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { ); assert_eq!(editor.text(cx), expected_text); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(selection_ranges) + s.select_ranges( + selection_ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }); editor.handle_input("X", window, cx); @@ -16383,6 +16924,9 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), expected_selections + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>() ); editor.newline(&Newline, window, cx); @@ -16403,6 +16947,9 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), expected_selections + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>() ); }); } @@ -16653,7 +17200,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) { anchor_range(Point::new(6, 3)..Point::new(6, 5)), anchor_range(Point::new(8, 4)..Point::new(8, 6)), ], - |_| Hsla::red(), + |_, _| Hsla::red(), cx, ); editor.highlight_background::( @@ -16663,7 +17210,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) { anchor_range(Point::new(7, 4)..Point::new(7, 7)), anchor_range(Point::new(9, 5)..Point::new(9, 8)), ], - |_| Hsla::green(), + |_, _| Hsla::green(), cx, ); @@ -16778,7 +17325,7 @@ async fn test_following(cx: &mut TestAppContext) { // Update the selections only _ = leader.update(cx, |leader, window, cx| { leader.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1]) + s.select_ranges([MultiBufferOffset(1)..MultiBufferOffset(1)]) }); }); follower @@ -16796,7 +17343,7 @@ async fn test_following(cx: &mut TestAppContext) { _ = follower.update(cx, |follower, _, cx| { assert_eq!( follower.selections.ranges(&follower.display_snapshot(cx)), - vec![1..1] + vec![MultiBufferOffset(1)..MultiBufferOffset(1)] ); }); assert!(*is_still_following.borrow()); @@ -16831,7 +17378,7 @@ async fn test_following(cx: &mut TestAppContext) { // via autoscroll, not via the leader's exact scroll position. 
_ = leader.update(cx, |leader, window, cx| { leader.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([0..0]) + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) }); leader.request_autoscroll(Autoscroll::newest(), cx); leader.set_scroll_position(gpui::Point::new(1.5, 3.5), window, cx); @@ -16852,7 +17399,7 @@ async fn test_following(cx: &mut TestAppContext) { assert_eq!(follower.scroll_position(cx), gpui::Point::new(1.5, 0.0)); assert_eq!( follower.selections.ranges(&follower.display_snapshot(cx)), - vec![0..0] + vec![MultiBufferOffset(0)..MultiBufferOffset(0)] ); }); assert!(*is_still_following.borrow()); @@ -16860,7 +17407,7 @@ async fn test_following(cx: &mut TestAppContext) { // Creating a pending selection that precedes another selection _ = leader.update(cx, |leader, window, cx| { leader.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([1..1]) + s.select_ranges([MultiBufferOffset(1)..MultiBufferOffset(1)]) }); leader.begin_selection(DisplayPoint::new(DisplayRow(0), 0), true, 1, window, cx); }); @@ -16879,7 +17426,10 @@ async fn test_following(cx: &mut TestAppContext) { _ = follower.update(cx, |follower, _, cx| { assert_eq!( follower.selections.ranges(&follower.display_snapshot(cx)), - vec![0..0, 1..1] + vec![ + MultiBufferOffset(0)..MultiBufferOffset(0), + MultiBufferOffset(1)..MultiBufferOffset(1) + ] ); }); assert!(*is_still_following.borrow()); @@ -16903,13 +17453,17 @@ async fn test_following(cx: &mut TestAppContext) { _ = follower.update(cx, |follower, _, cx| { assert_eq!( follower.selections.ranges(&follower.display_snapshot(cx)), - vec![0..2] + vec![MultiBufferOffset(0)..MultiBufferOffset(2)] ); }); // Scrolling locally breaks the follow _ = follower.update(cx, |follower, window, cx| { - let top_anchor = follower.buffer().read(cx).read(cx).anchor_after(0); + let top_anchor = follower + .buffer() + .read(cx) + .read(cx) + .anchor_after(MultiBufferOffset(0)); follower.set_scroll_anchor( ScrollAnchor { anchor: top_anchor, @@ -17369,12 +17923,49 @@ fn test_split_words() { assert_eq!(split(":do_the_thing"), &[":", "do_", "the_", "thing"]); } +#[test] +fn test_split_words_for_snippet_prefix() { + fn split(text: &str) -> Vec<&str> { + snippet_candidate_suffixes(text, |c| c.is_alphanumeric() || c == '_').collect() + } + + assert_eq!(split("HelloWorld"), &["HelloWorld"]); + assert_eq!(split("hello_world"), &["hello_world"]); + assert_eq!(split("_hello_world_"), &["_hello_world_"]); + assert_eq!(split("Hello_World"), &["Hello_World"]); + assert_eq!(split("helloWOrld"), &["helloWOrld"]); + assert_eq!(split("helloworld"), &["helloworld"]); + assert_eq!( + split("this@is!@#$^many . symbols"), + &[ + "symbols", + " symbols", + ". symbols", + " . symbols", + " . symbols", + " . symbols", + "many . symbols", + "^many . symbols", + "$^many . symbols", + "#$^many . symbols", + "@#$^many . symbols", + "!@#$^many . symbols", + "is!@#$^many . symbols", + "@is!@#$^many . symbols", + "this@is!@#$^many . 
symbols", + ], + ); + assert_eq!(split("a.s"), &["s", ".s", "a.s"]); +} + #[gpui::test] async fn test_move_to_enclosing_bracket(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorLspTestContext::new_typescript(Default::default(), cx).await; - let mut assert = |before, after| { + + #[track_caller] + fn assert(before: &str, after: &str, cx: &mut EditorLspTestContext) { let _state_context = cx.set_state(before); cx.run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -17382,30 +17973,33 @@ async fn test_move_to_enclosing_bracket(cx: &mut TestAppContext) { }); cx.run_until_parked(); cx.assert_editor_state(after); - }; + } // Outside bracket jumps to outside of matching bracket - assert("console.logˇ(var);", "console.log(var)ˇ;"); - assert("console.log(var)ˇ;", "console.logˇ(var);"); + assert("console.logˇ(var);", "console.log(var)ˇ;", &mut cx); + assert("console.log(var)ˇ;", "console.logˇ(var);", &mut cx); // Inside bracket jumps to inside of matching bracket - assert("console.log(ˇvar);", "console.log(varˇ);"); - assert("console.log(varˇ);", "console.log(ˇvar);"); + assert("console.log(ˇvar);", "console.log(varˇ);", &mut cx); + assert("console.log(varˇ);", "console.log(ˇvar);", &mut cx); // When outside a bracket and inside, favor jumping to the inside bracket assert( "console.log('foo', [1, 2, 3]ˇ);", - "console.log(ˇ'foo', [1, 2, 3]);", + "console.log('foo', ˇ[1, 2, 3]);", + &mut cx, ); assert( "console.log(ˇ'foo', [1, 2, 3]);", - "console.log('foo', [1, 2, 3]ˇ);", + "console.log('foo'ˇ, [1, 2, 3]);", + &mut cx, ); // Bias forward if two options are equally likely assert( "let result = curried_fun()ˇ();", "let result = curried_fun()()ˇ;", + &mut cx, ); // If directly adjacent to a smaller pair but inside a larger (not adjacent), pick the smaller @@ -17418,9 +18012,93 @@ async fn test_move_to_enclosing_bracket(cx: &mut TestAppContext) { function test() { console.logˇ('test') }"}, + &mut cx, ); } +#[gpui::test] +async fn test_move_to_enclosing_bracket_in_markdown_code_block(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let language_registry = Arc::new(language::LanguageRegistry::test(cx.executor())); + language_registry.add(markdown_lang()); + language_registry.add(rust_lang()); + let buffer = cx.new(|cx| { + let mut buffer = language::Buffer::local( + indoc! {" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } + } + ``` + "}, + cx, + ); + buffer.set_language_registry(language_registry.clone()); + buffer.set_language(Some(markdown_lang()), cx); + buffer + }); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let editor = cx.add_window(|window, cx| build_editor(buffer.clone(), window, cx)); + cx.executor().run_until_parked(); + _ = editor.update(cx, |editor, window, cx| { + // Case 1: Test outer enclosing brackets + select_ranges( + editor, + &indoc! {" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } + }ˇ + ``` + "}, + window, + cx, + ); + editor.move_to_enclosing_bracket(&MoveToEnclosingBracket, window, cx); + assert_text_with_selections( + editor, + &indoc! {" + ```rs + impl Worktree ˇ{ + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } + } + ``` + "}, + cx, + ); + // Case 2: Test inner enclosing brackets + select_ranges( + editor, + &indoc! 
{" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + }ˇ + } + ``` + "}, + window, + cx, + ); + editor.move_to_enclosing_bracket(&MoveToEnclosingBracket, window, cx); + assert_text_with_selections( + editor, + &indoc! {" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> ˇ{ + } + } + ``` + "}, + cx, + ); + }); +} + #[gpui::test] async fn test_on_type_formatting_not_triggered(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -17522,7 +18200,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut TestAppContext) { ); editor_handle.update_in(cx, |editor, window, cx| { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([Point::new(0, 21)..Point::new(0, 20)]) }); @@ -18640,17 +19318,120 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_addition_reverts(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; - let base_text = indoc! {r#" - struct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; +async fn test_document_format_with_prettier_explicit_language(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.formatter = Some(FormatterList::Single(Formatter::Prettier)) + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_file(path!("/file.settings"), Default::default()) + .await; + + let project = Project::test(fs, [path!("/file.settings").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + + let ts_lang = Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..LanguageMatcher::default() + }, + prettier_parser_name: Some("typescript".to_string()), + ..LanguageConfig::default() + }, + Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()), + )); + + language_registry.add(ts_lang.clone()); + + update_test_language_settings(cx, |settings| { + settings.defaults.prettier.get_or_insert_default().allowed = Some(true); + }); + + let test_plugin = "test_plugin"; + let _ = language_registry.register_fake_lsp( + "TypeScript", + FakeLspAdapter { + prettier_plugins: vec![test_plugin], + ..Default::default() + }, + ); + + let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/file.settings"), cx) + }) + .await + .unwrap(); + + project.update(cx, |project, cx| { + project.set_language_for_buffer(&buffer, ts_lang, cx) + }); + + let buffer_text = "one\ntwo\nthree\n"; + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx)); + editor.update_in(cx, |editor, window, cx| { + editor.set_text(buffer_text, window, cx) + }); + + editor + .update_in(cx, |editor, window, cx| { + editor.perform_format( + project.clone(), + FormatTrigger::Manual, + FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()), + window, + cx, + ) + }) + .unwrap() + .await; + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + buffer_text.to_string() + prettier_format_suffix + "\ntypescript", + "Test prettier formatting was not applied to the original 
buffer text", + ); + + update_test_language_settings(cx, |settings| { + settings.defaults.formatter = Some(FormatterList::default()) + }); + let format = editor.update_in(cx, |editor, window, cx| { + editor.perform_format( + project.clone(), + FormatTrigger::Manual, + FormatTarget::Buffers(editor.buffer().read(cx).all_buffers()), + window, + cx, + ) + }); + format.await.unwrap(); + + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + buffer_text.to_string() + + prettier_format_suffix + + "\ntypescript\n" + + prettier_format_suffix + + "\ntypescript", + "Autoformatting (via test prettier) was not applied to the original buffer text", + ); +} + +#[gpui::test] +async fn test_addition_reverts(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + let base_text = indoc! {r#" + struct Row; + struct Row1; + struct Row2; + + struct Row4; + struct Row5; + struct Row6; struct Row8; struct Row9; @@ -19285,7 +20066,7 @@ async fn test_multibuffer_in_navigation_history(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(1..2)), + |s| s.select_ranges(Some(MultiBufferOffset(1)..MultiBufferOffset(2))), ); editor.open_excerpts(&OpenExcerpts, window, cx); }); @@ -19341,7 +20122,7 @@ async fn test_multibuffer_in_navigation_history(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(39..40)), + |s| s.select_ranges(Some(MultiBufferOffset(39)..MultiBufferOffset(40))), ); editor.open_excerpts(&OpenExcerpts, window, cx); }); @@ -19401,7 +20182,7 @@ async fn test_multibuffer_in_navigation_history(cx: &mut TestAppContext) { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(70..70)), + |s| s.select_ranges(Some(MultiBufferOffset(70)..MultiBufferOffset(70))), ); editor.open_excerpts(&OpenExcerpts, window, cx); }); @@ -21270,10 +22051,9 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) .collect::>(); let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; - let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -21361,10 +22141,9 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) .collect::>(); let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; - let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -21427,10 +22206,9 @@ async fn test_toggle_deletion_hunk_at_start_of_file( .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) .collect::>(); let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; - let buffer_id = hunks[0].buffer_id; hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, buffer_id, hunk.buffer_range)) + .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) .collect::>() }); assert_eq!(hunk_ranges.len(), 1); @@ -21456,6 +22234,40 @@ async fn 
test_toggle_deletion_hunk_at_start_of_file( cx.assert_state_with_diff(hunk_expanded); } +#[gpui::test] +async fn test_expand_first_line_diff_hunk_keeps_deleted_lines_visible( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state("ˇnew\nsecond\nthird\n"); + cx.set_head_text("old\nsecond\nthird\n"); + cx.update_editor(|editor, window, cx| { + editor.scroll(gpui::Point { x: 0., y: 0. }, None, window, cx); + }); + executor.run_until_parked(); + assert_eq!(cx.update_editor(|e, _, cx| e.scroll_position(cx)).y, 0.0); + + // Expanding a diff hunk at the first line inserts deleted lines above the first buffer line. + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let hunks = editor + .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .collect::>(); + assert_eq!(hunks.len(), 1); + let hunk_range = Anchor::range_in_buffer(excerpt_id, hunks[0].buffer_range.clone()); + editor.toggle_single_diff_hunk(hunk_range, cx) + }); + executor.run_until_parked(); + cx.assert_state_with_diff("- old\n+ ˇnew\n second\n third\n".to_string()); + + // Keep the editor scrolled to the top so the full hunk remains visible. + assert_eq!(cx.update_editor(|e, _, cx| e.scroll_position(cx)).y, 0.0); +} + #[gpui::test] async fn test_display_diff_hunks(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -22023,7 +22835,7 @@ async fn test_find_all_references_editor_reuse(cx: &mut TestAppContext) { }); let navigated = cx .update_editor(|editor, window, cx| { - editor.find_all_references(&FindAllReferences, window, cx) + editor.find_all_references(&FindAllReferences::default(), window, cx) }) .unwrap() .await @@ -22059,7 +22871,7 @@ async fn test_find_all_references_editor_reuse(cx: &mut TestAppContext) { ); let navigated = cx .update_editor(|editor, window, cx| { - editor.find_all_references(&FindAllReferences, window, cx) + editor.find_all_references(&FindAllReferences::default(), window, cx) }) .unwrap() .await @@ -22111,7 +22923,7 @@ async fn test_find_all_references_editor_reuse(cx: &mut TestAppContext) { }); let navigated = cx .update_editor(|editor, window, cx| { - editor.find_all_references(&FindAllReferences, window, cx) + editor.find_all_references(&FindAllReferences::default(), window, cx) }) .unwrap() .await @@ -22191,7 +23003,7 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { (buffer.read(cx).remote_id(), 3), RunnableTasks { templates: vec![], - offset: snapshot.anchor_before(43), + offset: snapshot.anchor_before(MultiBufferOffset(43)), column: 0, extra_variables: HashMap::default(), context_range: BufferOffset(43)..BufferOffset(85), @@ -22201,7 +23013,7 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { (buffer.read(cx).remote_id(), 8), RunnableTasks { templates: vec![], - offset: snapshot.anchor_before(86), + offset: snapshot.anchor_before(MultiBufferOffset(86)), column: 0, extra_variables: HashMap::default(), context_range: BufferOffset(86)..BufferOffset(191), @@ -23520,7 +24332,7 @@ async fn test_rename_with_duplicate_edits(cx: &mut TestAppContext) { let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx)); editor.highlight_background::( &[highlight_range], - |theme| theme.colors().editor_document_highlight_read_background, + |_, theme| theme.colors().editor_document_highlight_read_background, cx, ); }); 
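Editor's note on the recurring mechanical change in the hunks above: plain `usize` selection offsets are being wrapped in a `MultiBufferOffset` newtype before being handed to `select_ranges` and the anchor helpers (for example `1..2` becomes `MultiBufferOffset(1)..MultiBufferOffset(2)`). The sketch below is only a self-contained illustration of that newtype pattern, under the assumption that the type is essentially a `usize` wrapper; it is not Zed's actual definition, which lives in the multi_buffer crate and carries more trait impls.

```rust
// Hypothetical sketch of the offset-newtype migration seen in the hunks above.
use std::ops::Range;

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct MultiBufferOffset(usize);

// Wraps plain byte-offset ranges the way the updated tests do before calling
// selection APIs that now expect `Range<MultiBufferOffset>`.
fn to_multibuffer_ranges(ranges: &[Range<usize>]) -> Vec<Range<MultiBufferOffset>> {
    ranges
        .iter()
        .map(|r| MultiBufferOffset(r.start)..MultiBufferOffset(r.end))
        .collect()
}

fn main() {
    // Mirrors the test updates: `1..1` becomes `MultiBufferOffset(1)..MultiBufferOffset(1)`.
    let wrapped = to_multibuffer_ranges(&[1..1, 39..40]);
    assert_eq!(wrapped[0].start, MultiBufferOffset(1));
    assert_eq!(wrapped[1].end, MultiBufferOffset(40));
}
```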
@@ -23598,7 +24410,7 @@ async fn test_rename_without_prepare(cx: &mut TestAppContext) { let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx)); editor.highlight_background::( &[highlight_range], - |theme| theme.colors().editor_document_highlight_read_background, + |_, theme| theme.colors().editor_document_highlight_read_background, cx, ); }); @@ -24767,6 +25579,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_python(cx: &mut TestApp ˇ log('for else') "}); cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): ˇfor item in items: @@ -24786,6 +25599,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_python(cx: &mut TestApp // test relative indent is preserved when tab // for `if`, `elif`, `else`, `while`, `with` and `for` cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): ˇfor item in items: @@ -24819,6 +25633,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_python(cx: &mut TestApp ˇ return 0 "}); cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): ˇtry: @@ -24835,6 +25650,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_python(cx: &mut TestApp // test relative indent is preserved when tab // for `try`, `except`, `else`, `finally`, `match` and `def` cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): ˇtry: @@ -24868,6 +25684,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("else:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): if i == 2: @@ -24885,6 +25702,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("except:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -24904,6 +25722,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("else:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -24927,6 +25746,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("finally:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -24951,6 +25771,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("else:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -24976,6 +25797,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("finally:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! 
{" def main(): try: @@ -25001,6 +25823,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("except:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -25024,6 +25847,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("except:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): try: @@ -25045,6 +25869,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("else:", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def main(): for i in range(10): @@ -25061,6 +25886,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("a", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" def f() -> list[str]: aˇ @@ -25074,6 +25900,7 @@ async fn test_outdent_after_input_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input(":", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" match 1: case:ˇ @@ -25097,6 +25924,7 @@ async fn test_indent_on_newline_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" # COMMENT: ˇ @@ -25109,7 +25937,7 @@ async fn test_indent_on_newline_for_python(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" { ˇ @@ -25143,6 +25971,48 @@ async fn test_indent_on_newline_for_python(cx: &mut TestAppContext) { "}); } +#[gpui::test] +async fn test_python_indent_in_markdown(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let language_registry = Arc::new(language::LanguageRegistry::test(cx.executor())); + let python_lang = languages::language("python", tree_sitter_python::LANGUAGE.into()); + language_registry.add(markdown_lang()); + language_registry.add(python_lang); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| { + buffer.set_language_registry(language_registry); + buffer.set_language(Some(markdown_lang()), cx); + }); + + // Test that `else:` correctly outdents to match `if:` inside the Python code block + cx.set_state(indoc! {" + # Heading + + ```python + def main(): + if condition: + pass + ˇ + ``` + "}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("else:", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + # Heading + + ```python + def main(): + if condition: + pass + else:ˇ + ``` + "}); +} + #[gpui::test] async fn test_tab_in_leading_whitespace_auto_indents_for_bash(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -25169,6 +26039,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_bash(cx: &mut TestAppCo ˇ} "}); cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! 
{" function main() { ˇfor item in $items; do @@ -25186,6 +26057,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_bash(cx: &mut TestAppCo "}); // test relative indent is preserved when tab cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" function main() { ˇfor item in $items; do @@ -25220,6 +26092,7 @@ async fn test_tab_in_leading_whitespace_auto_indents_for_bash(cx: &mut TestAppCo ˇ} "}); cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" function handle() { ˇcase \"$1\" in @@ -25262,6 +26135,7 @@ async fn test_indent_after_input_for_bash(cx: &mut TestAppContext) { ˇ} "}); cx.update_editor(|e, window, cx| e.handle_input("#", window, cx)); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" function main() { #ˇ for item in $items; do @@ -25296,6 +26170,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("else", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then echo \"foo bar\" @@ -25311,6 +26186,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("elif", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then echo \"foo bar\" @@ -25328,6 +26204,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("fi", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then echo \"foo bar\" @@ -25345,6 +26222,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("done", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" while read line; do echo \"$line\" @@ -25360,6 +26238,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("done", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" for file in *.txt; do cat \"$file\" @@ -25380,6 +26259,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("esac", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" case \"$1\" in start) @@ -25402,6 +26282,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("*)", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" case \"$1\" in start) @@ -25421,6 +26302,7 @@ async fn test_outdent_after_input_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.handle_input("fi", window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then echo \"outer if\" @@ -25447,6 +26329,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! 
{" # COMMENT: ˇ @@ -25460,7 +26343,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then @@ -25475,7 +26358,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then else @@ -25490,7 +26373,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" if [ \"$1\" = \"test\" ]; then elif @@ -25504,7 +26387,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" for file in *.txt; do ˇ @@ -25518,7 +26401,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" case \"$1\" in start) @@ -25535,7 +26418,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" case \"$1\" in start) @@ -25551,7 +26434,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! {" function test() { ˇ @@ -25565,7 +26448,7 @@ async fn test_indent_on_newline_for_bash(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { editor.newline(&Newline, window, cx); }); - cx.run_until_parked(); + cx.wait_for_autoindent_applied().await; cx.assert_editor_state(indoc! 
{" echo \"test\"; ˇ @@ -25583,7 +26466,10 @@ fn assert_selection_ranges(marked_text: &str, editor: &mut Editor, cx: &mut Cont assert_eq!(editor.text(cx), text); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), - ranges, + ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>(), "Assert selections are {}", marked_text ); @@ -25620,6 +26506,195 @@ pub fn check_displayed_completions(expected: Vec<&'static str>, cx: &mut EditorL }); } +#[gpui::test] +async fn test_mixed_completions_with_multi_word_snippet(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + completion_provider: Some(lsp::CompletionOptions { + ..Default::default() + }), + ..Default::default() + }, + cx, + ) + .await; + cx.lsp + .set_request_handler::(move |_, _| async move { + Ok(Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "unsafe".into(), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 9, + }, + end: lsp::Position { + line: 0, + character: 11, + }, + }, + new_text: "unsafe".to_string(), + })), + insert_text_mode: Some(lsp::InsertTextMode::AS_IS), + ..Default::default() + }, + ]))) + }); + + cx.update_editor(|editor, _, cx| { + editor.project().unwrap().update(cx, |project, cx| { + project.snippets().update(cx, |snippets, _cx| { + snippets.add_snippet_for_test( + None, + PathBuf::from("test_snippets.json"), + vec![ + Arc::new(project::snippet_provider::Snippet { + prefix: vec![ + "unlimited word count".to_string(), + "unlimit word count".to_string(), + "unlimited unknown".to_string(), + ], + body: "this is many words".to_string(), + description: Some("description".to_string()), + name: "multi-word snippet test".to_string(), + }), + Arc::new(project::snippet_provider::Snippet { + prefix: vec!["unsnip".to_string(), "@few".to_string()], + body: "fewer words".to_string(), + description: Some("alt description".to_string()), + name: "other name".to_string(), + }), + Arc::new(project::snippet_provider::Snippet { + prefix: vec!["ab aa".to_string()], + body: "abcd".to_string(), + description: None, + name: "alphabet".to_string(), + }), + ], + ); + }); + }) + }); + + let get_completions = |cx: &mut EditorLspTestContext| { + cx.update_editor(|editor, _, _| match &*editor.context_menu.borrow() { + Some(CodeContextMenu::Completions(context_menu)) => { + let entries = context_menu.entries.borrow(); + entries + .iter() + .map(|entry| entry.string.clone()) + .collect_vec() + } + _ => vec![], + }) + }; + + // snippets: + // @foo + // foo bar + // + // when typing: + // + // when typing: + // - if I type a symbol "open the completions with snippets only" + // - if I type a word character "open the completions menu" (if it had been open snippets only, clear it out) + // + // stuff we need: + // - filtering logic change? + // - remember how far back the completion started. 
+ + let test_cases: &[(&str, &[&str])] = &[ + ( + "un", + &[ + "unsafe", + "unlimit word count", + "unlimited unknown", + "unlimited word count", + "unsnip", + ], + ), + ( + "u ", + &[ + "unlimit word count", + "unlimited unknown", + "unlimited word count", + ], + ), + ("u a", &["ab aa", "unsafe"]), // unsAfe + ( + "u u", + &[ + "unsafe", + "unlimit word count", + "unlimited unknown", // ranked highest among snippets + "unlimited word count", + "unsnip", + ], + ), + ("uw c", &["unlimit word count", "unlimited word count"]), + ( + "u w", + &[ + "unlimit word count", + "unlimited word count", + "unlimited unknown", + ], + ), + ("u w ", &["unlimit word count", "unlimited word count"]), + ( + "u ", + &[ + "unlimit word count", + "unlimited unknown", + "unlimited word count", + ], + ), + ("wor", &[]), + ("uf", &["unsafe"]), + ("af", &["unsafe"]), + ("afu", &[]), + ( + "ue", + &["unsafe", "unlimited unknown", "unlimited word count"], + ), + ("@", &["@few"]), + ("@few", &["@few"]), + ("@ ", &[]), + ("a@", &["@few"]), + ("a@f", &["@few", "unsafe"]), + ("a@fw", &["@few"]), + ("a", &["ab aa", "unsafe"]), + ("aa", &["ab aa"]), + ("aaa", &["ab aa"]), + ("ab", &["ab aa"]), + ("ab ", &["ab aa"]), + ("ab a", &["ab aa", "unsafe"]), + ("ab ab", &["ab aa"]), + ("ab ab aa", &["ab aa"]), + ]; + + for &(input_to_simulate, expected_completions) in test_cases { + cx.set_state("fn a() { ˇ }\n"); + for c in input_to_simulate.split("") { + cx.simulate_input(c); + cx.run_until_parked(); + } + let expected_completions = expected_completions + .iter() + .map(|s| s.to_string()) + .collect_vec(); + assert_eq!( + get_completions(&mut cx), + expected_completions, + "< actual / expected >, input = {input_to_simulate:?}", + ); + } +} + /// Handle completion request passing a marked string specifying where the completion /// should be triggered from using '|' character, what range should be replaced, and what completions /// should be returned using '<' and '>' to delimit the range. 
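Editor's note: the doc comment just above describes the marked-string convention used by `handle_completion_request`, where `|` marks the position the completion is triggered from and `<`/`>` delimit the range that should be replaced. The stand-alone parser below is hypothetical and exists only to make that convention concrete; the real tests rely on `marked_text_ranges` (visible in the hunks that follow), not on a helper like this.

```rust
// Hypothetical parser for the '|' / '<' / '>' marker convention, for
// illustration only. Offsets are byte offsets into the cleaned string.
use std::ops::Range;

fn parse_completion_markers(marked: &str) -> (String, usize, Range<usize>) {
    let mut clean = String::new();
    let (mut trigger, mut start, mut end) = (0, 0, 0);
    for ch in marked.chars() {
        match ch {
            '|' => trigger = clean.len(), // completion trigger position
            '<' => start = clean.len(),   // start of the replace range
            '>' => end = clean.len(),     // end of the replace range
            _ => clean.push(ch),
        }
    }
    (clean, trigger, start..end)
}

fn main() {
    let (text, trigger, replace) = parse_completion_markers("one.|<two>");
    assert_eq!(text, "one.two");
    assert_eq!(trigger, 4);
    assert_eq!(replace, 4..7);
}
```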
@@ -25640,10 +26715,12 @@ pub fn handle_completion_request( vec![complete_from_marker.clone(), replace_range_marker.clone()], ); - let complete_from_position = - cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start); + let complete_from_position = cx.to_lsp(MultiBufferOffset( + marked_ranges.remove(&complete_from_marker).unwrap()[0].start, + )); + let range = marked_ranges.remove(&replace_range_marker).unwrap()[0].clone(); let replace_range = - cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); + cx.to_lsp_range(MultiBufferOffset(range.start)..MultiBufferOffset(range.end)); let mut request = cx.set_request_handler::(move |url, params, _| { @@ -25704,13 +26781,18 @@ pub fn handle_completion_request_with_insert_and_replace( ], ); - let complete_from_position = - cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start); + let complete_from_position = cx.to_lsp(MultiBufferOffset( + marked_ranges.remove(&complete_from_marker).unwrap()[0].start, + )); + let range = marked_ranges.remove(&replace_range_marker).unwrap()[0].clone(); let replace_range = - cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone()); + cx.to_lsp_range(MultiBufferOffset(range.start)..MultiBufferOffset(range.end)); let insert_range = match marked_ranges.remove(&insert_range_marker) { - Some(ranges) if !ranges.is_empty() => cx.to_lsp_range(ranges[0].clone()), + Some(ranges) if !ranges.is_empty() => { + let range1 = ranges[0].clone(); + cx.to_lsp_range(MultiBufferOffset(range1.start)..MultiBufferOffset(range1.end)) + } _ => lsp::Range { start: replace_range.start, end: complete_from_position, @@ -25760,7 +26842,10 @@ fn handle_resolve_completion_request( .iter() .map(|(marked_string, new_text)| { let (_, marked_ranges) = marked_text_ranges(marked_string, false); - let replace_range = cx.to_lsp_range(marked_ranges[0].clone()); + let replace_range = cx.to_lsp_range( + MultiBufferOffset(marked_ranges[0].start) + ..MultiBufferOffset(marked_ranges[0].end), + ); lsp::TextEdit::new(replace_range, new_text.to_string()) }) .collect::>() @@ -25821,7 +26906,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC let store = SettingsStore::test(cx); cx.set_global(store); theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); crate::init(cx); }); zlog::init_test(); @@ -25844,7 +26929,7 @@ fn assert_hunk_revert( let snapshot = editor.snapshot(window, cx); let reverted_hunk_statuses = snapshot .buffer_snapshot() - .diff_hunks_in_range(0..snapshot.buffer_snapshot().len()) + .diff_hunks_in_range(MultiBufferOffset(0)..snapshot.buffer_snapshot().len()) .map(|hunk| hunk.status().kind) .collect::>(); @@ -25934,7 +27019,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { } }); - let ensure_result_id = |expected: Option, cx: &mut TestAppContext| { + let ensure_result_id = |expected: Option, cx: &mut TestAppContext| { project.update(cx, |project, cx| { let buffer_id = editor .read(cx) @@ -25947,7 +27032,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { let buffer_result_id = project .lsp_store() .read(cx) - .result_id(server_id, buffer_id, cx); + .result_id_for_buffer_pull(server_id, buffer_id, &None, cx); assert_eq!(expected, buffer_result_id); }); }; @@ -25964,7 +27049,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { .next() .await .expect("should have sent the first 
diagnostics pull request"); - ensure_result_id(Some("1".to_string()), cx); + ensure_result_id(Some(SharedString::new("1")), cx); // Editing should trigger diagnostics editor.update_in(cx, |editor, window, cx| { @@ -25977,7 +27062,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { 2, "Editing should trigger diagnostic request" ); - ensure_result_id(Some("2".to_string()), cx); + ensure_result_id(Some(SharedString::new("2")), cx); // Moving cursor should not trigger diagnostic request editor.update_in(cx, |editor, window, cx| { @@ -25992,7 +27077,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { 2, "Cursor movement should not trigger diagnostic request" ); - ensure_result_id(Some("2".to_string()), cx); + ensure_result_id(Some(SharedString::new("2")), cx); // Multiple rapid edits should be debounced for _ in 0..5 { editor.update_in(cx, |editor, window, cx| { @@ -26007,7 +27092,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) { final_requests <= 4, "Multiple rapid edits should be debounced (got {final_requests} requests)", ); - ensure_result_id(Some(final_requests.to_string()), cx); + ensure_result_id(Some(SharedString::new(final_requests.to_string())), cx); } #[gpui::test] @@ -26137,22 +27222,98 @@ async fn test_add_selection_skip_soft_wrap_option(cx: &mut TestAppContext) { }); } -#[gpui::test(iterations = 10)] -async fn test_document_colors(cx: &mut TestAppContext) { - let expected_color = Rgba { - r: 0.33, - g: 0.33, - b: 0.33, - a: 0.33, - }; - +#[gpui::test] +async fn test_insert_snippet(cx: &mut TestAppContext) { init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/a"), - json!({ - "first.rs": "fn main() { let a = 5; }", + cx.update_editor(|editor, _, cx| { + editor.project().unwrap().update(cx, |project, cx| { + project.snippets().update(cx, |snippets, _cx| { + let snippet = project::snippet_provider::Snippet { + prefix: vec![], // no prefix needed! + body: "an Unspecified".to_string(), + description: Some("shhhh it's a secret".to_string()), + name: "super secret snippet".to_string(), + }; + snippets.add_snippet_for_test( + None, + PathBuf::from("test_snippets.json"), + vec![Arc::new(snippet)], + ); + + let snippet = project::snippet_provider::Snippet { + prefix: vec![], // no prefix needed! 
+ body: " Location".to_string(), + description: Some("the word 'location'".to_string()), + name: "location word".to_string(), + }; + snippets.add_snippet_for_test( + Some("Markdown".to_string()), + PathBuf::from("test_snippets.json"), + vec![Arc::new(snippet)], + ); + }); + }) + }); + + cx.set_state(indoc!(r#"First cursor at ˇ and second cursor at ˇ"#)); + + cx.update_editor(|editor, window, cx| { + editor.insert_snippet_at_selections( + &InsertSnippet { + language: None, + name: Some("super secret snippet".to_string()), + snippet: None, + }, + window, + cx, + ); + + // Language is specified in the action, + // so the buffer language does not need to match + editor.insert_snippet_at_selections( + &InsertSnippet { + language: Some("Markdown".to_string()), + name: Some("location word".to_string()), + snippet: None, + }, + window, + cx, + ); + + editor.insert_snippet_at_selections( + &InsertSnippet { + language: None, + name: None, + snippet: Some("$0 after".to_string()), + }, + window, + cx, + ); + }); + + cx.assert_editor_state( + r#"First cursor at an Unspecified Locationˇ after and second cursor at an Unspecified Locationˇ after"#, + ); +} + +#[gpui::test(iterations = 10)] +async fn test_document_colors(cx: &mut TestAppContext) { + let expected_color = Rgba { + r: 0.33, + g: 0.33, + b: 0.33, + a: 0.33, + }; + + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/a"), + json!({ + "first.rs": "fn main() { let a = 5; }", }), ) .await; @@ -26468,7 +27629,9 @@ async fn test_newline_replacement_in_single_line(cx: &mut TestAppContext) { editor.update(cx, |editor, cx| { assert_eq!(editor.display_text(cx), "oops⋯⋯wow⋯"); }); - editor.update(cx, |editor, cx| editor.edit([(3..5, "")], cx)); + editor.update(cx, |editor, cx| { + editor.edit([(MultiBufferOffset(3)..MultiBufferOffset(5), "")], cx) + }); cx.run_until_parked(); editor.update(cx, |editor, cx| { assert_eq!(editor.display_text(cx), "oop⋯wow⋯"); @@ -26503,11 +27666,10 @@ async fn test_non_utf_8_opens(cx: &mut TestAppContext) { }) .await .unwrap(); - - assert_eq!( - handle.to_any().entity_type(), - TypeId::of::() - ); + // The test file content `vec![0xff, 0xfe, ...]` starts with a UTF-16 LE BOM. + // Previously, this fell back to `InvalidItemView` because it wasn't valid UTF-8. + // With auto-detection enabled, this is now recognized as UTF-16 and opens in the Editor. + assert_eq!(handle.to_any_view().entity_type(), TypeId::of::()); } #[gpui::test] @@ -26642,7 +27804,7 @@ let result = variable * 2;", editor.highlight_background::( &anchor_ranges, - |theme| theme.colors().editor_document_highlight_read_background, + |_, theme| theme.colors().editor_document_highlight_read_background, cx, ); }); @@ -26738,6 +27900,186 @@ async fn test_paste_url_from_other_app_creates_markdown_link_over_selected_text( )); } +#[gpui::test] +async fn test_markdown_indents(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Test if adding a character with multi cursors preserves nested list indents + cx.set_state(&indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [ˇ] Item 2 + - [ˇ] Item 2.a + - [ˇ] Item 2.b + " + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input("x", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! 
{" + - [ ] Item 1 + - [ ] Item 1.a + - [xˇ] Item 2 + - [xˇ] Item 2.a + - [xˇ] Item 2.b + " + }); + + // Case 2: Test adding new line after nested list preserves indent of previous line + cx.set_state(&indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [x] Item 2 + - [x] Item 2.a + - [x] Item 2.bˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.newline(&Newline, window, cx); + }); + cx.assert_editor_state(indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [x] Item 2 + - [x] Item 2.a + - [x] Item 2.b + ˇ" + }); + + // Case 3: Test adding a new nested list item preserves indent + cx.set_state(&indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [x] Item 2 + - [x] Item 2.a + - [x] Item 2.b + ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input("-", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [x] Item 2 + - [x] Item 2.a + - [x] Item 2.b + -ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(" [x] Item 2.c", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + - [ ] Item 1 + - [ ] Item 1.a + - [x] Item 2 + - [x] Item 2.a + - [x] Item 2.b + - [x] Item 2.cˇ" + }); + + // Case 4: Test adding new line after nested ordered list preserves indent of previous line + cx.set_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.bˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.newline(&Newline, window, cx); + }); + cx.assert_editor_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.b + ˇ" + }); + + // Case 5: Adding new ordered list item preserves indent + cx.set_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.b + ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input("3", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.b + 3ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(".", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.b + 3.ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(" Item 2.c", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + 1. Item 1 + 1. Item 1.a + 2. Item 2 + 1. Item 2.a + 2. Item 2.b + 3. Item 2.cˇ" + }); + + // Case 6: Test adding new line after nested ordered list preserves indent of previous line + cx.set_state(indoc! {" + - Item 1 + - Item 1.a + - Item 1.a + ˇ"}); + cx.update_editor(|editor, window, cx| { + editor.handle_input("-", window, cx); + }); + cx.run_until_parked(); + cx.assert_editor_state(indoc! {" + - Item 1 + - Item 1.a + - Item 1.a + -ˇ"}); + + // Case 7: Test blockquote newline preserves something + cx.set_state(indoc! {" + > Item 1ˇ" + }); + cx.update_editor(|editor, window, cx| { + editor.newline(&Newline, window, cx); + }); + cx.assert_editor_state(indoc! 
{" + > Item 1 + ˇ" + }); +} + #[gpui::test] async fn test_paste_url_from_zed_copy_creates_markdown_link_over_selected_text( cx: &mut gpui::TestAppContext, @@ -27104,6 +28446,60 @@ async fn test_copy_line_without_trailing_newline(cx: &mut TestAppContext) { cx.assert_editor_state("line1\nline2\nˇ"); } +#[gpui::test] +async fn test_multi_selection_copy_with_newline_between_copied_lines(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state("ˇline1\nˇline2\nˇline3\n"); + + cx.update_editor(|e, window, cx| e.copy(&Copy, window, cx)); + + let clipboard_text = cx + .read_from_clipboard() + .and_then(|item| item.text().as_deref().map(str::to_string)); + + assert_eq!( + clipboard_text, + Some("line1\nline2\nline3\n".to_string()), + "Copying multiple lines should include a single newline between lines" + ); + + cx.set_state("lineA\nˇ"); + + cx.update_editor(|e, window, cx| e.paste(&Paste, window, cx)); + + cx.assert_editor_state("lineA\nline1\nline2\nline3\nˇ"); +} + +#[gpui::test] +async fn test_multi_selection_cut_with_newline_between_copied_lines(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state("ˇline1\nˇline2\nˇline3\n"); + + cx.update_editor(|e, window, cx| e.cut(&Cut, window, cx)); + + let clipboard_text = cx + .read_from_clipboard() + .and_then(|item| item.text().as_deref().map(str::to_string)); + + assert_eq!( + clipboard_text, + Some("line1\nline2\nline3\n".to_string()), + "Copying multiple lines should include a single newline between lines" + ); + + cx.set_state("lineA\nˇ"); + + cx.update_editor(|e, window, cx| e.paste(&Paste, window, cx)); + + cx.assert_editor_state("lineA\nline1\nline2\nline3\nˇ"); +} + #[gpui::test] async fn test_end_of_editor_context(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -27158,7 +28554,8 @@ async fn test_sticky_scroll(cx: &mut TestAppContext) { let mut sticky_headers = |offset: ScrollOffset| { cx.update_editor(|e, window, cx| { e.scroll(gpui::Point { x: 0., y: offset }, None, window, cx); - EditorElement::sticky_headers(&e, &e.snapshot(window, cx), cx) + let style = e.style(cx).clone(); + EditorElement::sticky_headers(&e, &e.snapshot(window, cx), &style, cx) .into_iter() .map( |StickyHeader { @@ -27212,10 +28609,9 @@ async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) { }); let mut cx = EditorTestContext::new(cx).await; - let line_height = cx.editor(|editor, window, _cx| { + let line_height = cx.update_editor(|editor, window, cx| { editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()) }); @@ -27486,7 +28882,7 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { // Scenario 1: Unfolded buffers, position cursor on "2", select all matches, then insert cx.update_editor(|editor, window, cx| { editor.change_selections(None.into(), window, cx, |s| { - s.select_ranges([2..3]); + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(3)]); }); }); cx.assert_excerpts_with_selections(indoc! 
{" @@ -27543,7 +28939,7 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { // Select "2" and select all matches cx.update_editor(|editor, window, cx| { editor.change_selections(None.into(), window, cx, |s| { - s.select_ranges([2..3]); + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(3)]); }); editor .select_all_matches(&SelectAllMatches, window, cx) @@ -27594,7 +28990,7 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { // Select "2" and select all matches cx.update_editor(|editor, window, cx| { editor.change_selections(None.into(), window, cx, |s| { - s.select_ranges([2..3]); + s.select_ranges([MultiBufferOffset(2)..MultiBufferOffset(3)]); }); editor .select_all_matches(&SelectAllMatches, window, cx) @@ -27632,34 +29028,580 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { 3 "}); - // Edge case scenario: fold all buffers, then try to insert + // Test correct folded header is selected upon fold cx.update_editor(|editor, _, cx| { editor.fold_buffer(buffer_ids[0], cx); editor.fold_buffer(buffer_ids[1], cx); }); cx.assert_excerpts_with_selections(indoc! {" - [EXCERPT] - ˇ[FOLDED] [EXCERPT] [FOLDED] + [EXCERPT] + ˇ[FOLDED] "}); - // Insert should work via default selection + // Test selection inside folded buffer unfolds it on type cx.update_editor(|editor, window, cx| { editor.handle_input("W", window, cx); }); cx.update_editor(|editor, _, cx| { editor.unfold_buffer(buffer_ids[0], cx); - editor.unfold_buffer(buffer_ids[1], cx); }); cx.assert_excerpts_with_selections(indoc! {" [EXCERPT] - Wˇ1 + 1 2 3 [EXCERPT] - 1 + Wˇ1 Z 3 "}); } + +#[gpui::test] +async fn test_filtered_editor_pair(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut leader_cx = EditorTestContext::new(cx).await; + + let diff_base = indoc!( + r#" + one + two + three + four + five + six + "# + ); + + let initial_state = indoc!( + r#" + ˇone + two + THREE + four + five + six + "# + ); + + leader_cx.set_state(initial_state); + + leader_cx.set_head_text(&diff_base); + leader_cx.run_until_parked(); + + let follower = leader_cx.update_multibuffer(|leader, cx| { + leader.set_filter_mode(Some(MultiBufferFilterMode::KeepInsertions)); + leader.set_all_diff_hunks_expanded(cx); + leader.get_or_create_follower(cx) + }); + follower.update(cx, |follower, cx| { + follower.set_filter_mode(Some(MultiBufferFilterMode::KeepDeletions)); + follower.set_all_diff_hunks_expanded(cx); + }); + + let follower_editor = + leader_cx.new_window_entity(|window, cx| build_editor(follower, window, cx)); + // leader_cx.window.focus(&follower_editor.focus_handle(cx)); + + let mut follower_cx = EditorTestContext::for_editor_in(follower_editor, &mut leader_cx).await; + cx.run_until_parked(); + + leader_cx.assert_editor_state(initial_state); + follower_cx.assert_editor_state(indoc! { + r#" + ˇone + two + three + four + five + six + "# + }); + + follower_cx.editor(|editor, _window, cx| { + assert!(editor.read_only(cx)); + }); + + leader_cx.update_editor(|editor, _window, cx| { + editor.edit([(Point::new(4, 0)..Point::new(5, 0), "FIVE\n")], cx); + }); + cx.run_until_parked(); + + leader_cx.assert_editor_state(indoc! { + r#" + ˇone + two + THREE + four + FIVE + six + "# + }); + + follower_cx.assert_editor_state(indoc! 
{ + r#" + ˇone + two + three + four + five + six + "# + }); + + leader_cx.update_editor(|editor, _window, cx| { + editor.edit([(Point::new(6, 0)..Point::new(6, 0), "SEVEN")], cx); + }); + cx.run_until_parked(); + + leader_cx.assert_editor_state(indoc! { + r#" + ˇone + two + THREE + four + FIVE + six + SEVEN"# + }); + + follower_cx.assert_editor_state(indoc! { + r#" + ˇone + two + three + four + five + six + "# + }); + + leader_cx.update_editor(|editor, window, cx| { + editor.move_down(&MoveDown, window, cx); + editor.refresh_selected_text_highlights(true, window, cx); + }); + leader_cx.run_until_parked(); +} + +#[gpui::test] +async fn test_filtered_editor_pair_complex(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let base_text = "base\n"; + let buffer_text = "buffer\n"; + + let buffer1 = cx.new(|cx| Buffer::local(buffer_text, cx)); + let diff1 = cx.new(|cx| BufferDiff::new_with_base_text(base_text, &buffer1, cx)); + + let extra_buffer_1 = cx.new(|cx| Buffer::local("dummy text 1\n", cx)); + let extra_diff_1 = cx.new(|cx| BufferDiff::new_with_base_text("", &extra_buffer_1, cx)); + let extra_buffer_2 = cx.new(|cx| Buffer::local("dummy text 2\n", cx)); + let extra_diff_2 = cx.new(|cx| BufferDiff::new_with_base_text("", &extra_buffer_2, cx)); + + let leader = cx.new(|cx| { + let mut leader = MultiBuffer::new(Capability::ReadWrite); + leader.set_all_diff_hunks_expanded(cx); + leader.set_filter_mode(Some(MultiBufferFilterMode::KeepInsertions)); + leader + }); + let follower = leader.update(cx, |leader, cx| leader.get_or_create_follower(cx)); + follower.update(cx, |follower, _| { + follower.set_filter_mode(Some(MultiBufferFilterMode::KeepDeletions)); + }); + + leader.update(cx, |leader, cx| { + leader.insert_excerpts_after( + ExcerptId::min(), + extra_buffer_2.clone(), + vec![ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + cx, + ); + leader.add_diff(extra_diff_2.clone(), cx); + + leader.insert_excerpts_after( + ExcerptId::min(), + extra_buffer_1.clone(), + vec![ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + cx, + ); + leader.add_diff(extra_diff_1.clone(), cx); + + leader.insert_excerpts_after( + ExcerptId::min(), + buffer1.clone(), + vec![ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + cx, + ); + leader.add_diff(diff1.clone(), cx); + }); + + cx.run_until_parked(); + let mut cx = cx.add_empty_window(); + + let leader_editor = cx + .new_window_entity(|window, cx| Editor::for_multibuffer(leader.clone(), None, window, cx)); + let follower_editor = cx.new_window_entity(|window, cx| { + Editor::for_multibuffer(follower.clone(), None, window, cx) + }); + + let mut leader_cx = EditorTestContext::for_editor_in(leader_editor.clone(), &mut cx).await; + leader_cx.assert_editor_state(indoc! {" + ˇbuffer + + dummy text 1 + + dummy text 2 + "}); + let mut follower_cx = EditorTestContext::for_editor_in(follower_editor.clone(), &mut cx).await; + follower_cx.assert_editor_state(indoc! {" + ˇbase + + + "}); +} + +#[gpui::test] +async fn test_multibuffer_scroll_cursor_top_margin(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let (editor, cx) = cx.add_window_view(|window, cx| { + let multi_buffer = MultiBuffer::build_multi( + [ + ("1\n2\n3\n", vec![Point::row_range(0..3)]), + ("1\n2\n3\n4\n5\n6\n7\n8\n9\n", vec![Point::row_range(0..9)]), + ], + cx, + ); + Editor::new(EditorMode::full(), multi_buffer, None, window, cx) + }); + + let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await; + + cx.assert_excerpts_with_selections(indoc! 
{" + [EXCERPT] + ˇ1 + 2 + 3 + [EXCERPT] + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + "}); + + cx.update_editor(|editor, window, cx| { + editor.change_selections(None.into(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(19)..MultiBufferOffset(19)]); + }); + }); + + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + 2 + 3 + [EXCERPT] + 1 + 2 + 3 + 4 + 5 + 6 + ˇ7 + 8 + 9 + "}); + + cx.update_editor(|editor, _window, cx| { + editor.set_vertical_scroll_margin(0, cx); + }); + + cx.update_editor(|editor, window, cx| { + assert_eq!(editor.vertical_scroll_margin(), 0); + editor.scroll_cursor_top(&ScrollCursorTop, window, cx); + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 12.0) + ); + }); + + cx.update_editor(|editor, _window, cx| { + editor.set_vertical_scroll_margin(3, cx); + }); + + cx.update_editor(|editor, window, cx| { + assert_eq!(editor.vertical_scroll_margin(), 3); + editor.scroll_cursor_top(&ScrollCursorTop, window, cx); + assert_eq!( + editor.snapshot(window, cx).scroll_position(), + gpui::Point::new(0., 9.0) + ); + }); +} + +#[gpui::test] +async fn test_find_references_single_case(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + references_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + cx, + ) + .await; + + let before = indoc!( + r#" + fn main() { + let aˇbc = 123; + let xyz = abc; + } + "# + ); + let after = indoc!( + r#" + fn main() { + let abc = 123; + let xyz = ˇabc; + } + "# + ); + + cx.lsp + .set_request_handler::(async move |params, _| { + Ok(Some(vec![ + lsp::Location { + uri: params.text_document_position.text_document.uri.clone(), + range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 11)), + }, + lsp::Location { + uri: params.text_document_position.text_document.uri, + range: lsp::Range::new(lsp::Position::new(2, 14), lsp::Position::new(2, 17)), + }, + ])) + }); + + cx.set_state(before); + + let action = FindAllReferences { + always_open_multibuffer: false, + }; + + let navigated = cx + .update_editor(|editor, window, cx| editor.find_all_references(&action, window, cx)) + .expect("should have spawned a task") + .await + .unwrap(); + + assert_eq!(navigated, Navigated::No); + + cx.run_until_parked(); + + cx.assert_editor_state(after); +} + +#[gpui::test] +async fn test_local_worktree_trust(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + cx.update(|cx| project::trusted_worktrees::init(HashMap::default(), None, None, cx)); + + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.inlay_hints = + Some(InlayHintSettingsContent { + enabled: Some(true), + ..InlayHintSettingsContent::default() + }); + }); + }); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + ".zed": { + "settings.json": r#"{"languages":{"Rust":{"language_servers":["override-rust-analyzer"]}}}"# + }, + "main.rs": "fn main() {}" + }), + ) + .await; + + let lsp_inlay_hint_request_count = Arc::new(AtomicUsize::new(0)); + let server_name = "override-rust-analyzer"; + let project = Project::test_with_worktree_trust(fs, [path!("/project").as_ref()], cx).await; + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let capabilities = lsp::ServerCapabilities { + inlay_hint_provider: 
Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }; + let mut fake_language_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: server_name, + capabilities, + initializer: Some(Box::new({ + let lsp_inlay_hint_request_count = lsp_inlay_hint_request_count.clone(); + move |fake_server| { + let lsp_inlay_hint_request_count = lsp_inlay_hint_request_count.clone(); + fake_server.set_request_handler::( + move |_params, _| { + lsp_inlay_hint_request_count.fetch_add(1, atomic::Ordering::Release); + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 0), + label: lsp::InlayHintLabel::String("hint".to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + cx.run_until_parked(); + + let worktree_id = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .next() + .map(|wt| wt.read(cx).id()) + .expect("should have a worktree") + }); + + let trusted_worktrees = + cx.update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should exist")); + + let can_trust = trusted_worktrees.update(cx, |store, cx| store.can_trust(worktree_id, cx)); + assert!(!can_trust, "worktree should be restricted initially"); + + let buffer_before_approval = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) + .await + .unwrap(); + + let (editor, cx) = cx.add_window_view(|window, cx| { + Editor::new( + EditorMode::full(), + cx.new(|cx| MultiBuffer::singleton(buffer_before_approval.clone(), cx)), + Some(project.clone()), + window, + cx, + ) + }); + cx.run_until_parked(); + let fake_language_server = fake_language_servers.next(); + + cx.read(|cx| { + let file = buffer_before_approval.read(cx).file(); + assert_eq!( + language::language_settings::language_settings(Some("Rust".into()), file, cx) + .language_servers, + ["...".to_string()], + "local .zed/settings.json must not apply before trust approval" + ) + }); + + editor.update_in(cx, |editor, window, cx| { + editor.handle_input("1", window, cx); + }); + cx.run_until_parked(); + cx.executor() + .advance_clock(std::time::Duration::from_secs(1)); + assert_eq!( + lsp_inlay_hint_request_count.load(atomic::Ordering::Acquire), + 0, + "inlay hints must not be queried before trust approval" + ); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + std::collections::HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + None, + cx, + ); + }); + cx.run_until_parked(); + + cx.read(|cx| { + let file = buffer_before_approval.read(cx).file(); + assert_eq!( + language::language_settings::language_settings(Some("Rust".into()), file, cx) + .language_servers, + ["override-rust-analyzer".to_string()], + "local .zed/settings.json should apply after trust approval" + ) + }); + let _fake_language_server = fake_language_server.await.unwrap(); + editor.update_in(cx, |editor, window, cx| { + editor.handle_input("1", window, cx); + }); + cx.run_until_parked(); + cx.executor() + .advance_clock(std::time::Duration::from_secs(1)); + assert!( + lsp_inlay_hint_request_count.load(atomic::Ordering::Acquire) > 0, + "inlay hints should be queried after trust approval" + ); + + let can_trust_after = + trusted_worktrees.update(cx, |store, cx| store.can_trust(worktree_id, cx)); + assert!(can_trust_after, "worktree should be trusted after trust()"); +} + +#[gpui::test] +fn 
test_editor_rendering_when_positioned_above_viewport(cx: &mut TestAppContext) { + // This test reproduces a bug where drawing an editor at a position above the viewport + // (simulating what happens when an AutoHeight editor inside a List is scrolled past) + // causes an infinite loop in blocks_in_range. + // + // The issue: when the editor's bounds.origin.y is very negative (above the viewport), + // the content mask intersection produces visible_bounds with origin at the viewport top. + // This makes clipped_top_in_lines very large, causing start_row to exceed max_row. + // When blocks_in_range is called with start_row > max_row, the cursor seeks to the end + // but the while loop after seek never terminates because cursor.next() is a no-op at end. + init_test(cx, |_| {}); + + let window = cx.add_window(|_, _| gpui::Empty); + let mut cx = VisualTestContext::from_window(*window, cx); + + let buffer = cx.update(|_, cx| MultiBuffer::build_simple("a\nb\nc\nd\ne\nf\ng\nh\ni\nj\n", cx)); + let editor = cx.new_window_entity(|window, cx| build_editor(buffer, window, cx)); + + // Simulate a small viewport (500x500 pixels at origin 0,0) + cx.simulate_resize(gpui::size(px(500.), px(500.))); + + // Draw the editor at a very negative Y position, simulating an editor that's been + // scrolled way above the visible viewport (like in a List that has scrolled past it). + // The editor is 3000px tall but positioned at y=-10000, so it's entirely above the viewport. + // This should NOT hang - it should just render nothing. + cx.draw( + gpui::point(px(0.), px(-10000.)), + gpui::size(px(500.), px(3000.)), + |_, _| editor.clone(), + ); + + // If we get here without hanging, the test passes +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7442ccc7442a11ab2f845cc637e5ad416085af02..f7b6aa949e74dca9bee73419fa2b87899f9986fd 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -6,11 +6,12 @@ use crate::{ EditDisplayMode, EditPrediction, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, - MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, - OpenExcerptsSplit, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, - SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects, - SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, + MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, + PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, SelectPhase, + SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects, SizingBehavior, + SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP}, + column_pixels, display_map::{ Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins, HighlightKey, HighlightedChunk, ToDisplayPoint, @@ -36,22 +37,18 @@ use crate::{ use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; use collections::{BTreeMap, HashMap}; use file_icons::FileIcons; -use git::{ - Oid, - blame::{BlameEntry, ParsedCommitMessage}, - status::FileStatus, -}; +use git::{Oid, blame::BlameEntry, commit::ParsedCommitMessage, status::FileStatus}; use gpui::{ Action, Along, AnyElement, App, 
AppContext, AvailableSpace, Axis as ScrollbarAxis, BorderStyle, Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner, Corners, CursorStyle, DispatchPhase, Edges, Element, ElementInputHandler, Entity, Focusable as _, FontId, GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, KeybindingKeystroke, Length, Modifiers, ModifiersChangedEvent, MouseButton, MouseClickEvent, - MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta, - ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement, - Style, Styled, TextRun, TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, - linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, solid_background, - transparent_black, + MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement, + Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, + Size, StatefulInteractiveElement, Style, Styled, TextRun, TextStyleRefinement, WeakEntity, + Window, anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, point, px, + quad, relative, size, solid_background, transparent_black, }; use itertools::Itertools; use language::{IndentGuideSettings, language_settings::ShowWhitespaceSetting}; @@ -61,6 +58,7 @@ use multi_buffer::{ MultiBufferRow, RowInfo, }; +use edit_prediction_types::EditPredictionGranularity; use project::{ Entry, ProjectPath, debugger::breakpoint_store::{Breakpoint, BreakpointSessionState}, @@ -74,6 +72,7 @@ use smallvec::{SmallVec, smallvec}; use std::{ any::TypeId, borrow::Cow, + cell::Cell, cmp::{self, Ordering}, fmt::{self, Write}, iter, mem, @@ -88,7 +87,7 @@ use text::{BufferId, SelectionGoal}; use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor}; use ui::utils::ensure_minimum_contrast; use ui::{ - ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*, + ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, prelude::*, right_click_menu, scrollbars::ShowScrollbar, text_for_keystroke, }; use unicode_segmentation::UnicodeSegmentation; @@ -130,6 +129,7 @@ impl SelectionLayout { fn new( selection: Selection, line_mode: bool, + cursor_offset: bool, cursor_shape: CursorShape, map: &DisplaySnapshot, is_newest: bool, @@ -150,12 +150,9 @@ impl SelectionLayout { } // any vim visual mode (including line mode) - if (cursor_shape == CursorShape::Block || cursor_shape == CursorShape::Hollow) - && !range.is_empty() - && !selection.reversed - { + if cursor_offset && !range.is_empty() && !selection.reversed { if head.column() > 0 { - head = map.clip_point(DisplayPoint::new(head.row(), head.column() - 1), Bias::Left) + head = map.clip_point(DisplayPoint::new(head.row(), head.column() - 1), Bias::Left); } else if head.row().0 > 0 && head != map.max_point() { head = map.clip_point( DisplayPoint::new( @@ -185,6 +182,13 @@ impl SelectionLayout { } } +#[derive(Default)] +struct RenderBlocksOutput { + blocks: Vec, + row_block_types: HashMap, + resized_blocks: Option>, +} + pub struct EditorElement { editor: Entity, style: EditorStyle, @@ -245,6 +249,8 @@ impl EditorElement { register_action(editor, window, Editor::sort_lines_case_insensitive); register_action(editor, window, Editor::reverse_lines); register_action(editor, window, Editor::shuffle_lines); + register_action(editor, window, Editor::rotate_selections_forward); + register_action(editor, window, 
Editor::rotate_selections_backward); register_action(editor, window, Editor::convert_indentation_to_spaces); register_action(editor, window, Editor::convert_indentation_to_tabs); register_action(editor, window, Editor::convert_to_upper_case); @@ -357,6 +363,7 @@ impl EditorElement { register_action(editor, window, Editor::split_selection_into_lines); register_action(editor, window, Editor::add_selection_above); register_action(editor, window, Editor::add_selection_below); + register_action(editor, window, Editor::insert_snippet_at_selections); register_action(editor, window, |editor, action, window, cx| { editor.select_next(action, window, cx).log_err(); }); @@ -583,8 +590,6 @@ impl EditorElement { register_action(editor, window, Editor::show_signature_help); register_action(editor, window, Editor::signature_help_prev); register_action(editor, window, Editor::signature_help_next); - register_action(editor, window, Editor::next_edit_prediction); - register_action(editor, window, Editor::previous_edit_prediction); register_action(editor, window, Editor::show_edit_prediction); register_action(editor, window, Editor::context_menu_first); register_action(editor, window, Editor::context_menu_prev); @@ -593,7 +598,8 @@ impl EditorElement { register_action(editor, window, Editor::display_cursor_names); register_action(editor, window, Editor::unique_lines_case_insensitive); register_action(editor, window, Editor::unique_lines_case_sensitive); - register_action(editor, window, Editor::accept_partial_edit_prediction); + register_action(editor, window, Editor::accept_next_word_edit_prediction); + register_action(editor, window, Editor::accept_next_line_edit_prediction); register_action(editor, window, Editor::accept_edit_prediction); register_action(editor, window, Editor::restore_file); register_action(editor, window, Editor::git_restore); @@ -1005,10 +1011,16 @@ impl EditorElement { let pending_nonempty_selections = editor.has_pending_nonempty_selection(); let hovered_link_modifier = Editor::is_cmd_or_ctrl_pressed(&event.modifiers(), cx); + let mouse_down_hovered_link_modifier = if let ClickEvent::Mouse(mouse_event) = event { + Editor::is_cmd_or_ctrl_pressed(&mouse_event.down.modifiers, cx) + } else { + true + }; if let Some(mouse_position) = event.mouse_position() && !pending_nonempty_selections && hovered_link_modifier + && mouse_down_hovered_link_modifier && text_hitbox.is_hovered(window) { let point = position_map.point_for_position(mouse_position); @@ -1019,6 +1031,28 @@ impl EditorElement { } } + fn pressure_click( + editor: &mut Editor, + event: &MousePressureEvent, + position_map: &PositionMap, + window: &mut Window, + cx: &mut Context, + ) { + let text_hitbox = &position_map.text_hitbox; + let force_click_possible = + matches!(editor.prev_pressure_stage, Some(PressureStage::Normal)) + && event.stage == PressureStage::Force; + + editor.prev_pressure_stage = Some(event.stage); + + if force_click_possible && text_hitbox.is_hovered(window) { + let point = position_map.point_for_position(event.position); + editor.handle_click_hovered_link(point, event.modifiers, window, cx); + editor.selection_drag_state = SelectionDragState::None; + cx.stop_propagation(); + } + } + fn mouse_dragged( editor: &mut Editor, event: &MouseMoveEvent, @@ -1150,7 +1184,7 @@ impl EditorElement { } } - fn mouse_moved( + pub(crate) fn mouse_moved( editor: &mut Editor, event: &MouseMoveEvent, position_map: &PositionMap, @@ -1161,7 +1195,7 @@ impl EditorElement { let gutter_hitbox = &position_map.gutter_hitbox; let modifiers 
= event.modifiers; let text_hovered = text_hitbox.is_hovered(window); - let gutter_hovered = gutter_hitbox.is_hovered(window); + let gutter_hovered = gutter_hitbox.bounds.contains(&event.position); editor.set_gutter_hovered(gutter_hovered, cx); editor.show_mouse_cursor(cx); @@ -1219,7 +1253,13 @@ impl EditorElement { editor.hide_blame_popover(false, cx); } } else { - editor.hide_blame_popover(false, cx); + let keyboard_grace = editor + .inline_blame_popover + .as_ref() + .is_some_and(|state| state.keyboard_grace); + if !keyboard_grace { + editor.hide_blame_popover(false, cx); + } } let breakpoint_indicator = if gutter_hovered { @@ -1417,6 +1457,7 @@ impl EditorElement { let layout = SelectionLayout::new( selection, editor.selections.line_mode(), + editor.cursor_offset_on_selection, editor.cursor_shape, &snapshot.display_snapshot, is_newest, @@ -1463,6 +1504,7 @@ impl EditorElement { let drag_cursor_layout = SelectionLayout::new( drop_cursor.clone(), false, + editor.cursor_offset_on_selection, CursorShape::Bar, &snapshot.display_snapshot, false, @@ -1526,6 +1568,7 @@ impl EditorElement { .push(SelectionLayout::new( selection.selection, selection.line_mode, + editor.cursor_offset_on_selection, selection.cursor_shape, &snapshot.display_snapshot, false, @@ -1536,6 +1579,8 @@ impl EditorElement { selections.extend(remote_selections.into_values()); } else if !editor.is_focused(window) && editor.show_cursor_when_unfocused { + let cursor_offset_on_selection = editor.cursor_offset_on_selection; + let layouts = snapshot .buffer_snapshot() .selections_in_range(&(start_anchor..end_anchor), true) @@ -1543,6 +1588,7 @@ impl EditorElement { SelectionLayout::new( selection, line_mode, + cursor_offset_on_selection, cursor_shape, &snapshot.display_snapshot, false, @@ -2252,7 +2298,8 @@ impl EditorElement { }; let padding = ProjectSettings::get_global(cx).diagnostics.inline.padding as f32 * em_width; - let min_x = self.column_pixels( + let min_x = column_pixels( + &self.style, ProjectSettings::get_global(cx) .diagnostics .inline @@ -2326,7 +2373,7 @@ impl EditorElement { .opacity(0.05)) .text_color(severity_to_color(&diagnostic_to_render.severity).color(cx)) .text_sm() - .font_family(style.text.font().family) + .font(style.text.font()) .child(diagnostic_to_render.message.clone()) .into_any(); @@ -2503,7 +2550,6 @@ impl EditorElement { scroll_position: gpui::Point, scroll_pixel_position: gpui::Point, line_height: Pixels, - text_hitbox: &Hitbox, window: &mut Window, cx: &mut App, ) -> Option { @@ -2556,7 +2602,8 @@ impl EditorElement { let padded_line_end = line_end + padding; - let min_column_in_pixels = self.column_pixels( + let min_column_in_pixels = column_pixels( + &self.style, ProjectSettings::get_global(cx).git.inline_blame.min_column as usize, window, ); @@ -2572,16 +2619,6 @@ impl EditorElement { let size = element.layout_as_root(AvailableSpace::min_size(), window, cx); let bounds = Bounds::new(absolute_offset, size); - self.layout_blame_entry_popover( - entry.clone(), - blame, - line_height, - text_hitbox, - row_info.buffer_id?, - window, - cx, - ); - element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), window, cx); Some(InlineBlameLayout { @@ -2592,16 +2629,48 @@ impl EditorElement { }) } - fn layout_blame_entry_popover( + fn layout_blame_popover( &self, - blame_entry: BlameEntry, - blame: Entity, - line_height: Pixels, + editor_snapshot: &EditorSnapshot, text_hitbox: &Hitbox, - buffer: BufferId, + line_height: Pixels, window: &mut Window, cx: &mut App, ) { + if 
!self.editor.read(cx).inline_blame_popover.is_some() { + return; + } + + let Some(blame) = self.editor.read(cx).blame.clone() else { + return; + }; + let cursor_point = self + .editor + .read(cx) + .selections + .newest::(&editor_snapshot.display_snapshot) + .head(); + + let Some((buffer, buffer_point, _)) = editor_snapshot + .buffer_snapshot() + .point_to_buffer_point(cursor_point) + else { + return; + }; + + let row_info = RowInfo { + buffer_id: Some(buffer.remote_id()), + buffer_row: Some(buffer_point.row), + ..Default::default() + }; + + let Some((buffer_id, blame_entry)) = blame + .update(cx, |blame, cx| blame.blame_for_rows(&[row_info], cx).next()) + .flatten() + else { + return; + }; + let Some((popover_state, target_point)) = self.editor.read_with(cx, |editor, _| { editor .inline_blame_popover @@ -2623,7 +2692,7 @@ impl EditorElement { popover_state.markdown, workspace, &blame, - buffer, + buffer_id, window, cx, ) @@ -2758,7 +2827,7 @@ impl EditorElement { .enumerate() .filter_map(|(i, indent_guide)| { let single_indent_width = - self.column_pixels(indent_guide.tab_size as usize, window); + column_pixels(&self.style, indent_guide.tab_size as usize, window); let total_width = single_indent_width * indent_guide.depth as f32; let start_x = Pixels::from( ScrollOffset::from(content_origin.x + total_width) @@ -2815,7 +2884,7 @@ impl EditorElement { .wrap_guides(cx) .into_iter() .flat_map(|(guide, active)| { - let wrap_position = self.column_pixels(guide, window); + let wrap_position = column_pixels(&self.style, guide, window); let wrap_guide_x = wrap_position + horizontal_offset; let display_wrap_guide = wrap_guide_x >= content_origin && wrap_guide_x <= hitbox.bounds.right() - vertical_scrollbar_width; @@ -3243,6 +3312,7 @@ impl EditorElement { SelectionLayout::new( newest, editor.selections.line_mode(), + editor.cursor_offset_on_selection, editor.cursor_shape, &snapshot.display_snapshot, true, @@ -3266,6 +3336,8 @@ impl EditorElement { line_number.clear(); let non_relative_number = if relative.wrapped() { row_info.buffer_row.or(row_info.wrapped_buffer_row)? + 1 + } else if self.editor.read(cx).use_base_text_line_numbers { + row_info.base_text_row?.0 + 1 } else { row_info.buffer_row? 
+ 1 }; @@ -3274,6 +3346,7 @@ impl EditorElement { && row_info .diff_status .is_some_and(|status| status.is_deleted()) + && !self.editor.read(cx).use_base_text_line_numbers { return None; } @@ -3664,8 +3737,10 @@ impl EditorElement { row_block_types: &mut HashMap, selections: &[Selection], selected_buffer_ids: &Vec, + latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, sticky_header_excerpt_id: Option, + block_resize_offset: &mut i32, window: &mut Window, cx: &mut App, ) -> Option<(AnyElement, Size, DisplayRow, Pixels)> { @@ -3739,7 +3814,13 @@ impl EditorElement { let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id); let result = v_flex().id(block_id).w_full().pr(editor_margins.right); - let jump_data = header_jump_data(snapshot, block_row_start, *height, first_excerpt); + let jump_data = header_jump_data( + snapshot, + block_row_start, + *height, + first_excerpt, + latest_selection_anchors, + ); result .child(self.render_buffer_header( first_excerpt, @@ -3774,7 +3855,13 @@ impl EditorElement { Block::BufferHeader { excerpt, height } => { let mut result = v_flex().id(block_id).w_full(); - let jump_data = header_jump_data(snapshot, block_row_start, *height, excerpt); + let jump_data = header_jump_data( + snapshot, + block_row_start, + *height, + excerpt, + latest_selection_anchors, + ); if sticky_header_excerpt_id != Some(excerpt.id) { let selected = selected_buffer_ids.contains(&excerpt.buffer_id); @@ -3807,7 +3894,10 @@ impl EditorElement { }; let mut element_height_in_lines = ((final_size.height / line_height).ceil() as u32).max(1); - let mut row = block_row_start; + let effective_row_start = block_row_start.0 as i32 + *block_resize_offset; + debug_assert!(effective_row_start >= 0); + let mut row = DisplayRow(effective_row_start.max(0) as u32); + let mut x_offset = px(0.); let mut is_block = true; @@ -3837,6 +3927,7 @@ impl EditorElement { } }; if element_height_in_lines != block.height() { + *block_resize_offset += element_height_in_lines as i32 - block.height() as i32; resized_blocks.insert(custom_block_id, element_height_in_lines); } } @@ -3859,6 +3950,8 @@ impl EditorElement { ) -> impl IntoElement { let editor = self.editor.read(cx); let multi_buffer = editor.buffer.read(cx); + let is_read_only = self.editor.read(cx).read_only(cx); + let file_status = multi_buffer .all_diff_hunks_expanded() .then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx)) @@ -3911,7 +4004,7 @@ impl EditorElement { .gap_1p5() .when(is_sticky, |el| el.shadow_md()) .border_1() - .map(|div| { + .map(|border| { let border_color = if is_selected && is_folded && focus_handle.contains_focused(window, cx) @@ -3920,7 +4013,7 @@ impl EditorElement { } else { colors.border }; - div.border_color(border_color) + border.border_color(border_color) }) .bg(colors.editor_subheader_background) .hover(|style| style.bg(colors.element_hover)) @@ -3943,9 +4036,14 @@ impl EditorElement { .children(toggle_chevron_icon) .tooltip({ let focus_handle = focus_handle.clone(); + let is_folded_for_tooltip = is_folded; move |_window, cx| { Tooltip::with_meta_in( - "Toggle Excerpt Fold", + if is_folded_for_tooltip { + "Unfold Excerpt" + } else { + "Fold Excerpt" + }, Some(&ToggleFold), format!( "{} to toggle all", @@ -3995,21 +4093,24 @@ impl EditorElement { }) .take(1), ) - .child( - h_flex() - .size(rems_from_px(12.0)) - .justify_center() - .flex_shrink_0() - .children(indicator), - ) + .when(!is_read_only, |this| { + this.child( + h_flex() + .size_3() + .justify_center() + 
.flex_shrink_0() + .children(indicator), + ) + }) .child( h_flex() .cursor_pointer() - .id("path header block") + .id("path_header_block") + .min_w_0() .size_full() .justify_between() .overflow_hidden() - .child(h_flex().gap_0p5().map(|path_header| { + .child(h_flex().min_w_0().flex_1().gap_0p5().map(|path_header| { let filename = filename .map(SharedString::from) .unwrap_or_else(|| "untitled".into()); @@ -4019,54 +4120,38 @@ impl EditorElement { let path = path::Path::new(filename.as_str()); let icon = FileIcons::get_icon(path, cx).unwrap_or_default(); - let icon = Icon::from_path(icon).color(Color::Muted); - el.child(icon) + + el.child(Icon::from_path(icon).color(Color::Muted)) }) .child( ButtonLike::new("filename-button") - .style(ButtonStyle::Subtle) .child( - div() - .child( - Label::new(filename) - .single_line() - .color(file_status_label_color( - file_status, - )) - .when( - file_status.is_some_and(|s| { - s.is_deleted() - }), - |label| label.strikethrough(), - ), - ) - .group_hover("", |div| div.underline()), + Label::new(filename) + .single_line() + .color(file_status_label_color(file_status)) + .when( + file_status.is_some_and(|s| s.is_deleted()), + |label| label.strikethrough(), + ), ) - .on_click({ - let focus_handle = focus_handle.clone(); - move |event, window, cx| { - if event.modifiers().secondary() { - focus_handle.dispatch_action( - &OpenExcerptsSplit, - window, - cx, - ); - } else { - focus_handle.dispatch_action( - &OpenExcerpts, - window, - cx, - ); - } + .on_click(window.listener_for(&self.editor, { + let jump_data = jump_data.clone(); + move |editor, e: &ClickEvent, window, cx| { + editor.open_excerpts_common( + Some(jump_data.clone()), + e.modifiers().secondary(), + window, + cx, + ); } - }), + })), ) .when_some(parent_path, |then, path| { - then.child(div().child(path).text_color( + then.child(Label::new(path).truncate().color( if file_status.is_some_and(FileStatus::is_deleted) { - colors.text_disabled + Color::Custom(colors.text_disabled) } else { - colors.text_muted + Color::Custom(colors.text_muted) }, )) }) @@ -4075,36 +4160,24 @@ impl EditorElement { can_open_excerpts && is_selected && relative_path.is_some(), |el| { el.child( - ButtonLike::new("open-file-button") + Button::new("open-file-button", "Open File") .style(ButtonStyle::OutlinedGhost) - .child( - h_flex() - .gap_2p5() - .child(Label::new("Open file")) - .child(KeyBinding::for_action_in( - &OpenExcerpts, - &focus_handle, + .key_binding(KeyBinding::for_action_in( + &OpenExcerpts, + &focus_handle, + cx, + )) + .on_click(window.listener_for(&self.editor, { + let jump_data = jump_data.clone(); + move |editor, e: &ClickEvent, window, cx| { + editor.open_excerpts_common( + Some(jump_data.clone()), + e.modifiers().secondary(), + window, cx, - )), - ) - .on_click({ - let focus_handle = focus_handle.clone(); - move |event, window, cx| { - if event.modifiers().secondary() { - focus_handle.dispatch_action( - &OpenExcerptsSplit, - window, - cx, - ); - } else { - focus_handle.dispatch_action( - &OpenExcerpts, - window, - cx, - ); - } + ); } - }), + })), ) }, ) @@ -4250,11 +4323,12 @@ impl EditorElement { line_layouts: &mut [LineWithInvisibles], selections: &[Selection], selected_buffer_ids: &Vec, + latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, sticky_header_excerpt_id: Option, window: &mut Window, cx: &mut App, - ) -> Result<(Vec, HashMap), HashMap> { + ) -> RenderBlocksOutput { let (fixed_blocks, non_fixed_blocks) = snapshot .blocks_in_range(rows.clone()) .partition::, 
_>(|(_, block)| block.style() == BlockStyle::Fixed); @@ -4266,6 +4340,7 @@ impl EditorElement { let mut blocks = Vec::new(); let mut resized_blocks = HashMap::default(); let mut row_block_types = HashMap::default(); + let mut block_resize_offset: i32 = 0; for (row, block) in fixed_blocks { let block_id = block.id(); @@ -4293,8 +4368,10 @@ impl EditorElement { &mut row_block_types, selections, selected_buffer_ids, + latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + &mut block_resize_offset, window, cx, ) { @@ -4350,8 +4427,10 @@ impl EditorElement { &mut row_block_types, selections, selected_buffer_ids, + latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + &mut block_resize_offset, window, cx, ) { @@ -4405,8 +4484,10 @@ impl EditorElement { &mut row_block_types, selections, selected_buffer_ids, + latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + &mut block_resize_offset, window, cx, ) { @@ -4426,9 +4507,12 @@ impl EditorElement { if resized_blocks.is_empty() { *scroll_width = (*scroll_width).max(fixed_block_max_width - editor_margins.gutter.width); - Ok((blocks, row_block_types)) - } else { - Err(resized_blocks) + } + + RenderBlocksOutput { + blocks, + row_block_types, + resized_blocks: (!resized_blocks.is_empty()).then_some(resized_blocks), } } @@ -4487,6 +4571,7 @@ impl EditorElement { hitbox: &Hitbox, selected_buffer_ids: &Vec, blocks: &[BlockLayout], + latest_selection_anchors: &HashMap, window: &mut Window, cx: &mut App, ) -> AnyElement { @@ -4495,6 +4580,7 @@ impl EditorElement { DisplayRow(scroll_position.y as u32), FILE_HEADER_HEIGHT + MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, excerpt, + latest_selection_anchors, ); let editor_bg_color = cx.theme().colors().editor_background; @@ -4565,6 +4651,7 @@ impl EditorElement { gutter_dimensions: &GutterDimensions, gutter_hitbox: &Hitbox, text_hitbox: &Hitbox, + style: &EditorStyle, window: &mut Window, cx: &mut App, ) -> Option { @@ -4572,7 +4659,7 @@ impl EditorElement { .show_line_numbers .unwrap_or_else(|| EditorSettings::get_global(cx).gutter.line_numbers); - let rows = Self::sticky_headers(self.editor.read(cx), snapshot, cx); + let rows = Self::sticky_headers(self.editor.read(cx), snapshot, style, cx); let mut lines = Vec::::new(); @@ -4631,6 +4718,7 @@ impl EditorElement { pub(crate) fn sticky_headers( editor: &Editor, snapshot: &EditorSnapshot, + style: &EditorStyle, cx: &App, ) -> Vec { let scroll_top = snapshot.scroll_position().y; @@ -4638,7 +4726,7 @@ impl EditorElement { let mut end_rows = Vec::::new(); let mut rows = Vec::::new(); - let items = editor.sticky_headers(cx).unwrap_or_default(); + let items = editor.sticky_headers(style, cx).unwrap_or_default(); for item in items { let start_point = item.range.start.to_point(snapshot.buffer_snapshot()); @@ -4808,8 +4896,11 @@ impl EditorElement { let edit_prediction = if edit_prediction_popover_visible { self.editor.update(cx, move |editor, cx| { - let accept_binding = - editor.accept_edit_prediction_keybind(false, window, cx); + let accept_binding = editor.accept_edit_prediction_keybind( + EditPredictionGranularity::Full, + window, + cx, + ); let mut element = editor.render_edit_prediction_cursor_popover( min_width, max_width, @@ -5201,7 +5292,7 @@ impl EditorElement { ) -> Option { let max_height_in_lines = ((height - POPOVER_Y_PADDING) / line_height).floor() as u32; self.editor.update(cx, |editor, cx| { - editor.render_context_menu(&self.style, max_height_in_lines, window, cx) + 
editor.render_context_menu(max_height_in_lines, window, cx) }) } @@ -5228,16 +5319,18 @@ impl EditorElement { window: &mut Window, cx: &mut App, ) -> Option { - let position = self.editor.update(cx, |editor, _cx| { + let position = self.editor.update(cx, |editor, cx| { let visible_start_point = editor.display_to_pixel_point( DisplayPoint::new(visible_range.start, 0), editor_snapshot, window, + cx, )?; let visible_end_point = editor.display_to_pixel_point( DisplayPoint::new(visible_range.end, 0), editor_snapshot, window, + cx, )?; let mouse_context_menu = editor.mouse_context_menu.as_ref()?; @@ -5245,7 +5338,8 @@ impl EditorElement { MenuPosition::PinnedToScreen(point) => (None, point), MenuPosition::PinnedToEditor { source, offset } => { let source_display_point = source.to_display_point(editor_snapshot); - let source_point = editor.to_pixel_point(source, editor_snapshot, window)?; + let source_point = + editor.to_pixel_point(source, editor_snapshot, window, cx)?; let position = content_origin + source_point + offset; (Some(source_display_point), position) } @@ -5552,6 +5646,50 @@ impl EditorElement { } } + fn layout_word_diff_highlights( + display_hunks: &[(DisplayDiffHunk, Option)], + row_infos: &[RowInfo], + start_row: DisplayRow, + snapshot: &EditorSnapshot, + highlighted_ranges: &mut Vec<(Range, Hsla)>, + cx: &mut App, + ) { + let colors = cx.theme().colors(); + + let word_highlights = display_hunks + .into_iter() + .filter_map(|(hunk, _)| match hunk { + DisplayDiffHunk::Unfolded { + word_diffs, status, .. + } => Some((word_diffs, status)), + _ => None, + }) + .filter(|(_, status)| status.is_modified()) + .flat_map(|(word_diffs, _)| word_diffs) + .filter_map(|word_diff| { + let start_point = word_diff.start.to_display_point(&snapshot.display_snapshot); + let end_point = word_diff.end.to_display_point(&snapshot.display_snapshot); + let start_row_offset = start_point.row().0.saturating_sub(start_row.0) as usize; + + row_infos + .get(start_row_offset) + .and_then(|row_info| row_info.diff_status) + .and_then(|diff_status| { + let background_color = match diff_status.kind { + DiffHunkStatusKind::Added => colors.version_control_word_added, + DiffHunkStatusKind::Deleted => colors.version_control_word_deleted, + DiffHunkStatusKind::Modified => { + debug_panic!("modified diff status for row info"); + return None; + } + }; + Some((start_point..end_point, background_color)) + }) + }); + + highlighted_ranges.extend(word_highlights); + } + fn layout_diff_hunk_controls( &self, row_range: Range, @@ -7652,6 +7790,19 @@ impl EditorElement { } }); + window.on_mouse_event({ + let position_map = layout.position_map.clone(); + let editor = self.editor.clone(); + + move |event: &MousePressureEvent, phase, window, cx| { + if phase == DispatchPhase::Bubble { + editor.update(cx, |editor, cx| { + Self::pressure_click(editor, &event, &position_map, window, cx); + }) + } + } + }); + window.on_mouse_event({ let position_map = layout.position_map.clone(); let editor = self.editor.clone(); @@ -7675,29 +7826,6 @@ impl EditorElement { }); } - fn column_pixels(&self, column: usize, window: &Window) -> Pixels { - let style = &self.style; - let font_size = style.text.font_size.to_pixels(window.rem_size()); - let layout = window.text_system().shape_line( - SharedString::from(" ".repeat(column)), - font_size, - &[TextRun { - len: column, - font: style.text.font(), - color: Hsla::default(), - ..Default::default() - }], - None, - ); - - layout.width - } - - fn max_line_number_width(&self, snapshot: &EditorSnapshot, window: 
&mut Window) -> Pixels { - let digit_count = snapshot.widest_line_number().ilog10() + 1; - self.column_pixels(digit_count as usize, window) - } - fn shape_line_number( &self, text: SharedString, @@ -7794,18 +7922,52 @@ fn file_status_label_color(file_status: Option) -> Color { } fn header_jump_data( + editor_snapshot: &EditorSnapshot, + block_row_start: DisplayRow, + height: u32, + first_excerpt: &ExcerptInfo, + latest_selection_anchors: &HashMap, +) -> JumpData { + let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id) + && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id) + && let Some(buffer) = editor_snapshot + .buffer_snapshot() + .buffer_for_excerpt(anchor.excerpt_id) + { + JumpTargetInExcerptInput { + id: anchor.excerpt_id, + buffer, + excerpt_start_anchor: range.start, + jump_anchor: anchor.text_anchor, + } + } else { + JumpTargetInExcerptInput { + id: first_excerpt.id, + buffer: &first_excerpt.buffer, + excerpt_start_anchor: first_excerpt.range.context.start, + jump_anchor: first_excerpt.range.primary.start, + } + }; + header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target) +} + +struct JumpTargetInExcerptInput<'a> { + id: ExcerptId, + buffer: &'a language::BufferSnapshot, + excerpt_start_anchor: text::Anchor, + jump_anchor: text::Anchor, +} + +fn header_jump_data_inner( snapshot: &EditorSnapshot, block_row_start: DisplayRow, height: u32, - for_excerpt: &ExcerptInfo, + for_excerpt: &JumpTargetInExcerptInput, ) -> JumpData { - let range = &for_excerpt.range; let buffer = &for_excerpt.buffer; - let jump_anchor = range.primary.start; - - let excerpt_start = range.context.start; - let jump_position = language::ToPoint::to_point(&jump_anchor, buffer); - let rows_from_excerpt_start = if jump_anchor == excerpt_start { + let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer); + let excerpt_start = for_excerpt.excerpt_start_anchor; + let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start { 0 } else { let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer); @@ -7822,7 +7984,7 @@ fn header_jump_data( JumpData::MultiBufferPoint { excerpt_id: for_excerpt.id, - anchor: jump_anchor, + anchor: for_excerpt.jump_anchor, position: jump_position, line_offset_from_top, } @@ -8620,7 +8782,7 @@ impl LineWithInvisibles { let fragment_end_x = fragment_start_x + shaped_line.width; if x < fragment_end_x { return Some( - fragment_start_index + shaped_line.index_for_x(x - fragment_start_x), + fragment_start_index + shaped_line.index_for_x(x - fragment_start_x)?, ); } fragment_start_x = fragment_end_x; @@ -8733,8 +8895,48 @@ impl EditorElement { } } +#[derive(Default)] +pub struct EditorRequestLayoutState { + // We use prepaint depth to limit the number of times prepaint is + // called recursively. We need this so that we can update stale + // data for e.g. block heights in block map. + prepaint_depth: Rc>, +} + +impl EditorRequestLayoutState { + // In ideal conditions we only need one more subsequent prepaint call for resize to take effect. + // i.e. MAX_PREPAINT_DEPTH = 2, but since moving blocks inline (place_near), more lines from + // below get exposed, and we end up querying blocks for those lines too in subsequent renders. + // Setting MAX_PREPAINT_DEPTH = 3, passes all tests. Just to be on the safe side we set it to 5, so + // that subsequent shrinking does not lead to incorrect block placing. 
+ const MAX_PREPAINT_DEPTH: usize = 5; + + fn increment_prepaint_depth(&self) -> EditorPrepaintGuard { + let depth = self.prepaint_depth.get(); + self.prepaint_depth.set(depth + 1); + EditorPrepaintGuard { + prepaint_depth: self.prepaint_depth.clone(), + } + } + + fn can_prepaint(&self) -> bool { + self.prepaint_depth.get() < Self::MAX_PREPAINT_DEPTH + } +} + +struct EditorPrepaintGuard { + prepaint_depth: Rc>, +} + +impl Drop for EditorPrepaintGuard { + fn drop(&mut self) { + let depth = self.prepaint_depth.get(); + self.prepaint_depth.set(depth.saturating_sub(1)); + } +} + impl Element for EditorElement { - type RequestLayoutState = (); + type RequestLayoutState = EditorRequestLayoutState; type PrepaintState = EditorLayout; fn id(&self) -> Option { @@ -8751,7 +8953,7 @@ impl Element for EditorElement { _inspector_id: Option<&gpui::InspectorElementId>, window: &mut Window, cx: &mut App, - ) -> (gpui::LayoutId, ()) { + ) -> (gpui::LayoutId, Self::RequestLayoutState) { let rem_size = self.rem_size(cx); window.with_rem_size(rem_size, |window| { self.editor.update(cx, |editor, cx| { @@ -8771,8 +8973,6 @@ impl Element for EditorElement { max_lines, } => { let editor_handle = cx.entity(); - let max_line_number_width = - self.max_line_number_width(&editor.snapshot(window, cx), window); window.request_measured_layout( Style::default(), move |known_dimensions, available_space, window, cx| { @@ -8782,7 +8982,6 @@ impl Element for EditorElement { editor, min_lines, max_lines, - max_line_number_width, known_dimensions, available_space.width, window, @@ -8818,7 +9017,7 @@ impl Element for EditorElement { } }; - (layout_id, ()) + (layout_id, EditorRequestLayoutState::default()) }) }) } @@ -8828,10 +9027,11 @@ impl Element for EditorElement { _: Option<&GlobalElementId>, _inspector_id: Option<&gpui::InspectorElementId>, bounds: Bounds, - _: &mut Self::RequestLayoutState, + request_layout: &mut Self::RequestLayoutState, window: &mut Window, cx: &mut App, ) -> Self::PrepaintState { + let _prepaint_depth_guard = request_layout.increment_prepaint_depth(); let text_style = TextStyleRefinement { font_size: Some(self.style.text.font_size), line_height: Some(self.style.text.line_height), @@ -8868,15 +9068,10 @@ impl Element for EditorElement { .gutter_dimensions( font_id, font_size, - self.max_line_number_width(&snapshot, window), + style, + window, cx, - ) - .or_else(|| { - self.editor.read(cx).offset_content.then(|| { - GutterDimensions::default_with_margin(font_id, font_size, cx) - }) - }) - .unwrap_or_default(); + ); let text_width = bounds.size.width - gutter_dimensions.width; let settings = EditorSettings::get_global(cx); @@ -8963,6 +9158,15 @@ impl Element for EditorElement { let height_in_lines = f64::from(bounds.size.height / line_height); let max_row = snapshot.max_point().row().as_f64(); + // Calculate how much of the editor is clipped by parent containers (e.g., List). + // This allows us to only render lines that are actually visible, which is + // critical for performance when large AutoHeight editors are inside Lists. 
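Note for reviewers: the clipping arithmetic introduced in this hunk can be illustrated in isolation. The sketch below is hypothetical and uses plain `f64` values and made-up numbers instead of gpui's `Pixels`/`DisplayRow` types; it only demonstrates why clamping `start_row` to `max_row` avoids the hang exercised by `test_editor_rendering_when_positioned_above_viewport` earlier in this diff.

```rust
/// Illustrative stand-in for the visible-row computation; names and values
/// here are assumptions, not the editor's real types.
fn visible_row_range(
    editor_origin_y: f64, // editor's y position, possibly far above the viewport (negative)
    mask_origin_y: f64,   // top of the parent's content mask (e.g. the List viewport)
    mask_height: f64,     // height of the content mask in pixels
    line_height: f64,
    scroll_y: f64,        // editor's own scroll position, in rows
    max_row: u32,
) -> (u32, u32) {
    // How much of the editor is hidden above the parent container.
    let clipped_top = (mask_origin_y - editor_origin_y).max(0.0);
    let clipped_top_in_lines = clipped_top / line_height;
    let visible_height_in_lines = mask_height / line_height;

    // Without the `.min(max_row)` clamp, an editor drawn entirely above the
    // viewport would produce start_row > max_row, which is what previously
    // made blocks_in_range spin forever.
    let start_row = ((scroll_y + clipped_top_in_lines).floor() as u32).min(max_row);
    let end_row = ((scroll_y + clipped_top_in_lines + visible_height_in_lines).ceil() as u32)
        .min(max_row + 1);
    (start_row, end_row)
}

fn main() {
    // A 3000px-tall editor at y = -10000 inside a 500px viewport at y = 0,
    // mirroring the regression test: the range stays within bounds.
    let (start, end) = visible_row_range(-10000.0, 0.0, 500.0, 20.0, 0.0, 10);
    assert!(start <= 10 && end <= 11);
    println!("visible rows: {start}..{end}");
}
```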
+ let visible_bounds = window.content_mask().bounds; + let clipped_top = (visible_bounds.origin.y - bounds.origin.y).max(px(0.)); + let clipped_top_in_lines = f64::from(clipped_top / line_height); + let visible_height_in_lines = + f64::from(visible_bounds.size.height / line_height); + // The max scroll position for the top of the window let max_scroll_top = if matches!( snapshot.mode, @@ -9019,15 +9223,21 @@ impl Element for EditorElement { let mut scroll_position = snapshot.scroll_position(); // The scroll position is a fractional point, the whole number of which represents // the top of the window in terms of display rows. - let start_row = DisplayRow(scroll_position.y as u32); + // We add clipped_top_in_lines to skip rows that are clipped by parent containers, + // but we don't modify scroll_position itself since the parent handles positioning. let max_row = snapshot.max_point().row(); + let start_row = cmp::min( + DisplayRow((scroll_position.y + clipped_top_in_lines).floor() as u32), + max_row, + ); let end_row = cmp::min( - (scroll_position.y + height_in_lines).ceil() as u32, + (scroll_position.y + clipped_top_in_lines + visible_height_in_lines).ceil() + as u32, max_row.next_row().0, ); let end_row = DisplayRow(end_row); - let row_infos = snapshot + let row_infos = snapshot // note we only get the visual range .row_infos(start_row) .take((start_row..end_row).len()) .collect::>(); @@ -9058,16 +9268,27 @@ impl Element for EditorElement { let is_light = cx.theme().appearance().is_light(); + let mut highlighted_ranges = self + .editor_with_selections(cx) + .map(|editor| { + editor.read(cx).background_highlights_in_range( + start_anchor..end_anchor, + &snapshot.display_snapshot, + cx.theme(), + ) + }) + .unwrap_or_default(); + for (ix, row_info) in row_infos.iter().enumerate() { let Some(diff_status) = row_info.diff_status else { continue; }; let background_color = match diff_status.kind { - DiffHunkStatusKind::Added => cx.theme().colors().version_control_added, - DiffHunkStatusKind::Deleted => { - cx.theme().colors().version_control_deleted - } + DiffHunkStatusKind::Added => + cx.theme().colors().version_control_added, + DiffHunkStatusKind::Deleted => + cx.theme().colors().version_control_deleted, DiffHunkStatusKind::Modified => { debug_panic!("modified diff status for row info"); continue; @@ -9105,21 +9326,14 @@ impl Element for EditorElement { filled_highlight }; + let base_display_point = + DisplayPoint::new(start_row + DisplayRow(ix as u32), 0); + highlighted_rows - .entry(start_row + DisplayRow(ix as u32)) + .entry(base_display_point.row()) .or_insert(background); } - let highlighted_ranges = self - .editor_with_selections(cx) - .map(|editor| { - editor.read(cx).background_highlights_in_range( - start_anchor..end_anchor, - &snapshot.display_snapshot, - cx.theme(), - ) - }) - .unwrap_or_default(); let highlighted_gutter_ranges = self.editor.read(cx).gutter_highlights_in_range( start_anchor..end_anchor, @@ -9139,15 +9353,18 @@ impl Element for EditorElement { cx, ); - let (local_selections, selected_buffer_ids): ( + let (local_selections, selected_buffer_ids, latest_selection_anchors): ( Vec>, Vec, + HashMap, ) = self .editor_with_selections(cx) .map(|editor| { editor.update(cx, |editor, cx| { let all_selections = editor.selections.all::(&snapshot.display_snapshot); + let all_anchor_selections = + editor.selections.all_anchors(&snapshot.display_snapshot); let selected_buffer_ids = if editor.buffer_kind(cx) == ItemBufferKind::Singleton { Vec::new() @@ -9176,10 +9393,31 @@ impl Element 
for EditorElement { selections .extend(editor.selections.pending(&snapshot.display_snapshot)); - (selections, selected_buffer_ids) + let mut anchors_by_buffer: HashMap = + HashMap::default(); + for selection in all_anchor_selections.iter() { + let head = selection.head(); + if let Some(buffer_id) = head.text_anchor.buffer_id { + anchors_by_buffer + .entry(buffer_id) + .and_modify(|(latest_id, latest_anchor)| { + if selection.id > *latest_id { + *latest_id = selection.id; + *latest_anchor = head; + } + }) + .or_insert((selection.id, head)); + } + } + let latest_selection_anchors = anchors_by_buffer + .into_iter() + .map(|(buffer_id, (_, anchor))| (buffer_id, anchor)) + .collect(); + + (selections, selected_buffer_ids, latest_selection_anchors) }) }) - .unwrap_or_default(); + .unwrap_or_else(|| (Vec::new(), Vec::new(), HashMap::default())); let (selections, mut active_rows, newest_selection_head) = self .layout_selections( @@ -9268,7 +9506,7 @@ impl Element for EditorElement { let crease_trailers = window.with_element_namespace("crease_trailers", |window| { self.layout_crease_trailers( - row_infos.iter().copied(), + row_infos.iter().cloned(), &snapshot, window, cx, @@ -9284,6 +9522,15 @@ impl Element for EditorElement { cx, ); + Self::layout_word_diff_highlights( + &display_hunks, + &row_infos, + start_row, + &snapshot, + &mut highlighted_ranges, + cx, + ); + let merged_highlighted_ranges = if let Some((_, colors)) = document_colors.as_ref() { &highlighted_ranges @@ -9331,7 +9578,20 @@ impl Element for EditorElement { // If the fold widths have changed, we need to prepaint // the element again to account for any changes in // wrapping. - return self.prepaint(None, _inspector_id, bounds, &mut (), window, cx); + if request_layout.can_prepaint() { + return self.prepaint( + None, + _inspector_id, + bounds, + request_layout, + window, + cx, + ); + } else { + debug_panic!( + "skipping recursive prepaint at max depth. renderer widths may be stale." + ); + } } let longest_line_blame_width = self @@ -9410,6 +9670,7 @@ impl Element for EditorElement { &mut line_layouts, &local_selections, &selected_buffer_ids, + &latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, window, @@ -9417,20 +9678,35 @@ impl Element for EditorElement { ) }) }) - .unwrap_or_else(|| Ok((Vec::default(), HashMap::default()))); - let (mut blocks, row_block_types) = match blocks { - Ok(blocks) => blocks, - Err(resized_blocks) => { - self.editor.update(cx, |editor, cx| { - editor.resize_blocks( - resized_blocks, - autoscroll_request.map(|(autoscroll, _)| autoscroll), - cx, - ) - }); - return self.prepaint(None, _inspector_id, bounds, &mut (), window, cx); + .unwrap_or_default(); + let RenderBlocksOutput { + mut blocks, + row_block_types, + resized_blocks, + } = blocks; + if let Some(resized_blocks) = resized_blocks { + self.editor.update(cx, |editor, cx| { + editor.resize_blocks( + resized_blocks, + autoscroll_request.map(|(autoscroll, _)| autoscroll), + cx, + ) + }); + if request_layout.can_prepaint() { + return self.prepaint( + None, + _inspector_id, + bounds, + request_layout, + window, + cx, + ); + } else { + debug_panic!( + "skipping recursive prepaint at max depth. block layout may be stale." 
+ ); } - }; + } let sticky_buffer_header = sticky_header_excerpt.map(|sticky_header_excerpt| { window.with_element_namespace("blocks", |window| { @@ -9443,6 +9719,7 @@ impl Element for EditorElement { &hitbox, &selected_buffer_ids, &blocks, + &latest_selection_anchors, window, cx, ) @@ -9500,6 +9777,7 @@ impl Element for EditorElement { &gutter_dimensions, &gutter_hitbox, &text_hitbox, + &style, window, cx, ) @@ -9607,7 +9885,6 @@ impl Element for EditorElement { scroll_position, scroll_pixel_position, line_height, - &text_hitbox, window, cx, ) { @@ -9805,6 +10082,8 @@ impl Element for EditorElement { window, cx, ); + + self.layout_blame_popover(&snapshot, &hitbox, line_height, window, cx); } let mouse_context_menu = self.layout_mouse_context_menu( @@ -10558,9 +10837,9 @@ impl ScrollbarLayout { show_thumb: bool, axis: ScrollbarAxis, ) -> Self { - let text_units_per_page = f64::from(viewport_size / glyph_space); + let text_units_per_page = viewport_size.to_f64() / glyph_space.to_f64(); let visible_range = scroll_position..scroll_position + text_units_per_page; - let total_text_units = scroll_range / f64::from(glyph_space); + let total_text_units = scroll_range / glyph_space.to_f64(); let thumb_percentage = text_units_per_page / total_text_units; let thumb_size = Pixels::from(ScrollOffset::from(track_length) * thumb_percentage) @@ -11215,7 +11494,6 @@ fn compute_auto_height_layout( editor: &mut Editor, min_lines: usize, max_lines: Option, - max_line_number_width: Pixels, known_dimensions: Size>, available_width: AvailableSpace, window: &mut Window, @@ -11239,14 +11517,7 @@ fn compute_auto_height_layout( let em_width = window.text_system().em_width(font_id, font_size).unwrap(); let mut snapshot = editor.snapshot(window, cx); - let gutter_dimensions = snapshot - .gutter_dimensions(font_id, font_size, max_line_number_width, cx) - .or_else(|| { - editor - .offset_content - .then(|| GutterDimensions::default_with_margin(font_id, font_size, cx)) - }) - .unwrap_or_default(); + let gutter_dimensions = snapshot.gutter_dimensions(font_id, font_size, style, window, cx); editor.gutter_dimensions = gutter_dimensions; let text_width = width - gutter_dimensions.width; @@ -11309,7 +11580,7 @@ mod tests { }); let cx = &mut VisualTestContext::from_window(*window, cx); let editor = window.root(cx).unwrap(); - let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + let style = cx.update(|_, cx| editor.update(cx, |editor, cx| editor.style(cx).clone())); for x in 1..=100 { let (_, state) = cx.draw( @@ -11337,7 +11608,7 @@ mod tests { }); let cx = &mut VisualTestContext::from_window(*window, cx); let editor = window.root(cx).unwrap(); - let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + let style = cx.update(|_, cx| editor.update(cx, |editor, cx| editor.style(cx).clone())); for x in 1..=100 { let (_, state) = cx.draw( @@ -11362,7 +11633,7 @@ mod tests { }); let editor = window.root(cx).unwrap(); - let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + let style = editor.update(cx, |editor, cx| editor.style(cx).clone()); let line_height = window .update(cx, |_, window, _| { style.text.line_height_in_pixels(window.rem_size()) @@ -11510,7 +11781,7 @@ mod tests { }); let editor = window.root(cx).unwrap(); - let style = cx.update(|cx| editor.read(cx).style().unwrap().clone()); + let style = editor.update(cx, |editor, cx| editor.style(cx).clone()); let line_height = window .update(cx, |_, window, _| { style.text.line_height_in_pixels(window.rem_size()) @@ 
-11637,11 +11908,11 @@ mod tests { }); let cx = &mut VisualTestContext::from_window(*window, cx); let editor = window.root(cx).unwrap(); - let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + let style = cx.update(|_, cx| editor.update(cx, |editor, cx| editor.style(cx).clone())); window .update(cx, |editor, window, cx| { - editor.cursor_shape = CursorShape::Block; + editor.cursor_offset_on_selection = true; editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([ Point::new(0, 0)..Point::new(1, 0), @@ -11708,7 +11979,7 @@ mod tests { }); let cx = &mut VisualTestContext::from_window(*window, cx); let editor = window.root(cx).unwrap(); - let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + let style = cx.update(|_, cx| editor.update(cx, |editor, cx| editor.style(cx).clone())); window .update(cx, |editor, window, cx| { editor.set_placeholder_text("hello", window, cx); @@ -11948,7 +12219,7 @@ mod tests { let cx = &mut VisualTestContext::from_window(*window, cx); let editor = window.root(cx).unwrap(); - let style = cx.update(|_, cx| editor.read(cx).style().unwrap().clone()); + let style = editor.update(cx, |editor, cx| editor.style(cx).clone()); window .update(cx, |editor, _, cx| { editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 52eab4f817acad25deebbfa6d807020f9ce1ac80..d1338c3cbd3540914b23a53410fd5c823e1285c8 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -1,11 +1,11 @@ use crate::Editor; -use anyhow::Result; +use anyhow::{Context as _, Result}; use collections::HashMap; -use futures::StreamExt; + use git::{ - GitHostingProviderRegistry, GitRemote, Oid, - blame::{Blame, BlameEntry, ParsedCommitMessage}, - parse_git_remote_url, + GitHostingProviderRegistry, Oid, + blame::{Blame, BlameEntry}, + commit::ParsedCommitMessage, }; use gpui::{ AnyElement, App, AppContext as _, Context, Entity, Hsla, ScrollHandle, Subscription, Task, @@ -67,7 +67,7 @@ impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 { struct GitBlameBuffer { entries: SumTree, buffer_snapshot: BufferSnapshot, - buffer_edits: text::Subscription, + buffer_edits: text::Subscription, commit_details: HashMap, } @@ -494,76 +494,103 @@ impl GitBlame { self.changed_while_blurred = true; return; } - let blame = self.project.update(cx, |project, cx| { - let Some(multi_buffer) = self.multi_buffer.upgrade() else { - return Vec::new(); - }; - multi_buffer - .read(cx) - .all_buffer_ids() - .into_iter() - .filter_map(|id| { - let buffer = multi_buffer.read(cx).buffer(id)?; - let snapshot = buffer.read(cx).snapshot(); - let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); - - let blame_buffer = project.blame_buffer(&buffer, None, cx); - Some(async move { (id, snapshot, buffer_edits, blame_buffer.await) }) - }) - .collect::>() - }); - let provider_registry = GitHostingProviderRegistry::default_global(cx); + let buffers_to_blame = self + .multi_buffer + .update(cx, |multi_buffer, _| { + multi_buffer + .all_buffer_ids() + .into_iter() + .filter_map(|id| Some(multi_buffer.buffer(id)?.downgrade())) + .collect::>() + }) + .unwrap_or_default(); + let project = self.project.downgrade(); self.task = cx.spawn(async move |this, cx| { - let (result, errors) = cx - .background_spawn({ - async move { - let blame = futures::stream::iter(blame) - .buffered(4) - .collect::>() - .await; - let mut res = vec![]; - let mut 
errors = vec![]; - for (id, snapshot, buffer_edits, blame) in blame { - match blame { - Ok(Some(Blame { - entries, - messages, - remote_url, - })) => { - let entries = build_blame_entry_sum_tree( - entries, - snapshot.max_point().row, - ); - let commit_details = parse_commit_messages( - messages, - remote_url, - provider_registry.clone(), - ) - .await; - - res.push(( + let mut all_results = Vec::new(); + let mut all_errors = Vec::new(); + + for buffers in buffers_to_blame.chunks(4) { + let blame = cx.update(|cx| { + buffers + .iter() + .map(|buffer| { + let buffer = buffer.upgrade().context("buffer was dropped")?; + let project = project.upgrade().context("project was dropped")?; + let id = buffer.read(cx).remote_id(); + let snapshot = buffer.read(cx).snapshot(); + let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); + let remote_url = project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx) + .and_then(|(repo, _)| repo.read(cx).default_remote_url()); + let blame_buffer = project + .update(cx, |project, cx| project.blame_buffer(&buffer, None, cx)); + Ok(async move { + (id, snapshot, buffer_edits, blame_buffer.await, remote_url) + }) + }) + .collect::>>() + })??; + let provider_registry = + cx.update(|cx| GitHostingProviderRegistry::default_global(cx))?; + let (results, errors) = cx + .background_spawn({ + async move { + let blame = futures::future::join_all(blame).await; + let mut res = vec![]; + let mut errors = vec![]; + for (id, snapshot, buffer_edits, blame, remote_url) in blame { + match blame { + Ok(Some(Blame { entries, messages })) => { + let entries = build_blame_entry_sum_tree( + entries, + snapshot.max_point().row, + ); + let commit_details = messages + .into_iter() + .map(|(oid, message)| { + let parsed_commit_message = + ParsedCommitMessage::parse( + oid.to_string(), + message, + remote_url.as_deref(), + Some(provider_registry.clone()), + ); + (oid, parsed_commit_message) + }) + .collect(); + res.push(( + id, + snapshot, + buffer_edits, + Some(entries), + commit_details, + )); + } + Ok(None) => res.push(( id, snapshot, buffer_edits, - Some(entries), - commit_details, - )); + None, + Default::default(), + )), + Err(e) => errors.push(e), } - Ok(None) => { - res.push((id, snapshot, buffer_edits, None, Default::default())) - } - Err(e) => errors.push(e), } + (res, errors) } - (res, errors) - } - }) - .await; + }) + .await; + all_results.extend(results); + all_errors.extend(errors) + } this.update(cx, |this, cx| { this.buffers.clear(); - for (id, snapshot, buffer_edits, entries, commit_details) in result { + for (id, snapshot, buffer_edits, entries, commit_details) in all_results { let Some(entries) = entries else { continue; }; @@ -578,11 +605,11 @@ impl GitBlame { ); } cx.notify(); - if !errors.is_empty() { + if !all_errors.is_empty() { this.project.update(cx, |_, cx| { if this.user_triggered { - log::error!("failed to get git blame data: {errors:?}"); - let notification = errors + log::error!("failed to get git blame data: {all_errors:?}"); + let notification = all_errors .into_iter() .format_with(",", |e, f| f(&format_args!("{:#}", e))) .to_string(); @@ -593,7 +620,7 @@ impl GitBlame { } else { // If we weren't triggered by a user, we just log errors in the background, instead of sending // notifications. 
- log::debug!("failed to get git blame data: {errors:?}"); + log::debug!("failed to get git blame data: {all_errors:?}"); } }) } @@ -654,55 +681,6 @@ fn build_blame_entry_sum_tree(entries: Vec, max_row: u32) -> SumTree entries } -async fn parse_commit_messages( - messages: impl IntoIterator, - remote_url: Option, - provider_registry: Arc, -) -> HashMap { - let mut commit_details = HashMap::default(); - - let parsed_remote_url = remote_url - .as_deref() - .and_then(|remote_url| parse_git_remote_url(provider_registry, remote_url)); - - for (oid, message) in messages { - let permalink = if let Some((provider, git_remote)) = parsed_remote_url.as_ref() { - Some(provider.build_commit_permalink( - git_remote, - git::BuildCommitPermalinkParams { - sha: oid.to_string().as_str(), - }, - )) - } else { - None - }; - - let remote = parsed_remote_url - .as_ref() - .map(|(provider, remote)| GitRemote { - host: provider.clone(), - owner: remote.owner.clone().into(), - repo: remote.repo.clone().into(), - }); - - let pull_request = parsed_remote_url - .as_ref() - .and_then(|(provider, remote)| provider.extract_pull_request(remote, &message)); - - commit_details.insert( - oid, - ParsedCommitMessage { - message: message.into(), - permalink, - remote, - pull_request, - }, - ); - } - - commit_details -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index 286260e3b0f42da0c3416a07357128ac5e3d0c57..3ead3e2a11348b0f262926bbfe4fb880f0dff663 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -1,11 +1,13 @@ use crate::{Editor, RangeToAnchorExt}; use gpui::{Context, HighlightStyle, Window}; use language::CursorShape; +use multi_buffer::MultiBufferOffset; use theme::ActiveTheme; enum MatchingBracketHighlight {} impl Editor { + #[ztracing::instrument(skip_all)] pub fn refresh_matching_bracket_highlights( &mut self, window: &Window, @@ -15,7 +17,7 @@ impl Editor { let snapshot = self.snapshot(window, cx); let buffer_snapshot = snapshot.buffer_snapshot(); - let newest_selection = self.selections.newest::(&snapshot); + let newest_selection = self.selections.newest::(&snapshot); // Don't highlight brackets if the selection isn't empty if !newest_selection.is_empty() { return; diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 03fce48f146fbfa3bdab93937038c4101a04a484..1c00acbfa9f1a69cbe01c45758db5a0cd4fee757 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -9,8 +9,10 @@ use language::{Bias, ToOffset}; use linkify::{LinkFinder, LinkKind}; use lsp::LanguageServerId; use project::{InlayId, LocationLink, Project, ResolvedPath}; +use regex::Regex; use settings::Settings; -use std::ops::Range; +use std::{ops::Range, sync::LazyLock}; +use text::OffsetRangeExt; use theme::ActiveTheme as _; use util::{ResultExt, TryFutureExt as _, maybe}; @@ -168,7 +170,7 @@ impl Editor { match EditorSettings::get_global(cx).go_to_definition_fallback { GoToDefinitionFallback::None => None, GoToDefinitionFallback::FindAllReferences => { - editor.find_all_references(&FindAllReferences, window, cx) + editor.find_all_references(&FindAllReferences::default(), window, cx) } } }) @@ -216,7 +218,7 @@ impl Editor { self.hide_hovered_link(cx); if !hovered_link_state.links.is_empty() { if !self.focus_handle.is_focused(window) { - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); } // exclude 
links pointing back to the current anchor @@ -595,7 +597,8 @@ pub(crate) async fn find_file( let project = project?; let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()).ok()?; let scope = snapshot.language_scope_at(position); - let (range, candidate_file_path) = surrounding_filename(snapshot, position)?; + let (range, candidate_file_path) = surrounding_filename(&snapshot, position)?; + let candidate_len = candidate_file_path.len(); async fn check_path( candidate_file_path: &str, @@ -612,29 +615,66 @@ pub(crate) async fn find_file( .filter(|s| s.is_file()) } - if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { - return Some((range, existing_path)); + let pattern_candidates = link_pattern_file_candidates(&candidate_file_path); + + for (pattern_candidate, pattern_range) in &pattern_candidates { + if let Some(existing_path) = check_path(&pattern_candidate, &project, buffer, cx).await { + let offset_range = range.to_offset(&snapshot); + let actual_start = offset_range.start + pattern_range.start; + let actual_end = offset_range.end - (candidate_len - pattern_range.end); + return Some(( + snapshot.anchor_before(actual_start)..snapshot.anchor_after(actual_end), + existing_path, + )); + } } - if let Some(scope) = scope { - for suffix in scope.path_suffixes() { - if candidate_file_path.ends_with(format!(".{suffix}").as_str()) { - continue; - } + for (pattern_candidate, pattern_range) in pattern_candidates { + for suffix in scope.path_suffixes() { + if pattern_candidate.ends_with(format!(".{suffix}").as_str()) { + continue; + } - let suffixed_candidate = format!("{candidate_file_path}.{suffix}"); - if let Some(existing_path) = check_path(&suffixed_candidate, &project, buffer, cx).await - { - return Some((range, existing_path)); + let suffixed_candidate = format!("{pattern_candidate}.{suffix}"); + if let Some(existing_path) = + check_path(&suffixed_candidate, &project, buffer, cx).await + { + let offset_range = range.to_offset(&snapshot); + let actual_start = offset_range.start + pattern_range.start; + let actual_end = offset_range.end - (candidate_len - pattern_range.end); + return Some(( + snapshot.anchor_before(actual_start)..snapshot.anchor_after(actual_end), + existing_path, + )); + } } } } - None } +// Tries to capture potentially inlined links, like those found in markdown, +// e.g. [LinkTitle](link_file.txt) +// Since files can have parens, we should always return the full string +// (literally, [LinkTitle](link_file.txt)) as a candidate. 
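// A standalone sketch of the candidate extraction described above, assuming the
// same `regex` crate dependency this change already pulls in; the helper name is
// illustrative, and the regex is recompiled per call only for brevity (the patch
// itself caches it in a `LazyLock`).
fn markdown_link_candidates_sketch(candidate: &str) -> Vec<(String, std::ops::Range<usize>)> {
    // Keep the full string first, since file names may legitimately contain parens.
    let mut candidates = vec![(candidate.to_string(), 0..candidate.len())];
    // Then add the text between the first `(` and the next `)`, e.g. `link_file.txt`
    // in `[LinkTitle](link_file.txt)`.
    if let Some(link) = regex::Regex::new(r"\(([^)]*)\)")
        .expect("valid regex")
        .captures(candidate)
        .and_then(|captures| captures.get(1))
    {
        candidates.push((link.as_str().to_string(), link.range()));
    }
    candidates
}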
+fn link_pattern_file_candidates(candidate: &str) -> Vec<(String, Range)> { + static MD_LINK_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\(([^)]*)\)").expect("Failed to create REGEX")); + + let candidate_len = candidate.len(); + + let mut candidates = vec![(candidate.to_string(), 0..candidate_len)]; + + if let Some(captures) = MD_LINK_REGEX.captures(candidate) { + if let Some(link) = captures.get(1) { + candidates.push((link.as_str().to_string(), link.range())); + } + } + candidates +} + fn surrounding_filename( - snapshot: language::BufferSnapshot, + snapshot: &language::BufferSnapshot, position: text::Anchor, ) -> Option<(Range, String)> { const LIMIT: usize = 2048; @@ -735,9 +775,10 @@ mod tests { test::editor_lsp_test_context::EditorLspTestContext, }; use futures::StreamExt; - use gpui::Modifiers; + use gpui::{Modifiers, MousePressureEvent, PressureStage}; use indoc::indoc; use lsp::request::{GotoDefinition, GotoTypeDefinition}; + use multi_buffer::MultiBufferOffset; use settings::InlayHintSettingsContent; use util::{assert_set_eq, path}; use workspace::item::Item; @@ -1067,8 +1108,8 @@ mod tests { .clone(); cx.update_editor(|editor, window, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - let anchor_range = snapshot.anchor_before(selection_range.start) - ..snapshot.anchor_after(selection_range.end); + let anchor_range = snapshot.anchor_before(MultiBufferOffset(selection_range.start)) + ..snapshot.anchor_after(MultiBufferOffset(selection_range.end)); editor.change_selections(Default::default(), window, cx, |s| { s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character) }); @@ -1122,7 +1163,7 @@ mod tests { } "})[0] .start; - let hint_position = cx.to_lsp(hint_start_offset); + let hint_position = cx.to_lsp(MultiBufferOffset(hint_start_offset)); let target_range = cx.lsp_range(indoc! 
{" struct «TestStruct»; @@ -1179,8 +1220,8 @@ mod tests { .unwrap(); let midpoint = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let previous_valid = inlay_range.start.to_display_point(&snapshot); - let next_valid = inlay_range.end.to_display_point(&snapshot); + let previous_valid = MultiBufferOffset(inlay_range.start).to_display_point(&snapshot); + let next_valid = MultiBufferOffset(inlay_range.end).to_display_point(&snapshot); assert_eq!(previous_valid.row(), next_valid.row()); assert!(previous_valid.column() < next_valid.column()); DisplayPoint::new( @@ -1203,7 +1244,7 @@ mod tests { let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); let expected_highlight = InlayHighlight { inlay: InlayId::Hint(0), - inlay_position: buffer_snapshot.anchor_after(inlay_range.start), + inlay_position: buffer_snapshot.anchor_after(MultiBufferOffset(inlay_range.start)), range: 0..hint_label.len(), }; assert_set_eq!(actual_highlights, vec![&expected_highlight]); @@ -1315,6 +1356,58 @@ mod tests { assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into())); } + #[test] + fn test_link_pattern_file_candidates() { + let candidates: Vec = link_pattern_file_candidates("[LinkTitle](link_file.txt)") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!( + candidates, + vec!["[LinkTitle](link_file.txt)", "link_file.txt",] + ); + // Link title with spaces in it + let candidates: Vec = link_pattern_file_candidates("LinkTitle](link_file.txt)") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!( + candidates, + vec!["LinkTitle](link_file.txt)", "link_file.txt",] + ); + + // Link with spaces + let candidates: Vec = link_pattern_file_candidates("LinkTitle](link\\ _file.txt)") + .into_iter() + .map(|(c, _)| c) + .collect(); + + assert_eq!( + candidates, + vec!["LinkTitle](link\\ _file.txt)", "link\\ _file.txt",] + ); + // + // Square brackets not strictly necessary + let candidates: Vec = link_pattern_file_candidates("(link_file.txt)") + .into_iter() + .map(|(c, _)| c) + .collect(); + + assert_eq!(candidates, vec!["(link_file.txt)", "link_file.txt",]); + + // No nesting + let candidates: Vec = + link_pattern_file_candidates("LinkTitle](link_(link_file)file.txt)") + .into_iter() + .map(|(c, _)| c) + .collect(); + + assert_eq!( + candidates, + vec!["LinkTitle](link_(link_file)file.txt)", "link_(link_file",] + ) + } + #[gpui::test] async fn test_surrounding_filename(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); @@ -1373,7 +1466,7 @@ mod tests { (positions, snapshot) }); - let result = surrounding_filename(snapshot, position); + let result = surrounding_filename(&snapshot, position); if let Some(expected) = expected { assert!(result.is_some(), "Failed to find file path: {}", input); @@ -1705,4 +1798,77 @@ mod tests { cx.simulate_click(screen_coord, Modifiers::secondary_key()); cx.update_workspace(|workspace, _, cx| assert_eq!(workspace.items(cx).count(), 1)); } + + #[gpui::test] + async fn test_pressure_links(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + hover_provider: Some(lsp::HoverProviderCapability::Simple(true)), + definition_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + cx, + ) + .await; + + cx.set_state(indoc! {" + fn ˇtest() { do_work(); } + fn do_work() { test(); } + "}); + + // Position the mouse over a symbol that has a definition + let hover_point = cx.pixel_position(indoc! 
{" + fn test() { do_wˇork(); } + fn do_work() { test(); } + "}); + let symbol_range = cx.lsp_range(indoc! {" + fn test() { «do_work»(); } + fn do_work() { test(); } + "}); + let target_range = cx.lsp_range(indoc! {" + fn test() { do_work(); } + fn «do_work»() { test(); } + "}); + + let mut requests = + cx.set_request_handler::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Link(vec![ + lsp::LocationLink { + origin_selection_range: Some(symbol_range), + target_uri: url.clone(), + target_range, + target_selection_range: target_range, + }, + ]))) + }); + + cx.simulate_mouse_move(hover_point, None, Modifiers::none()); + + // First simulate Normal pressure to set up the previous stage + cx.simulate_event(MousePressureEvent { + pressure: 0.5, + stage: PressureStage::Normal, + position: hover_point, + modifiers: Modifiers::none(), + }); + cx.background_executor.run_until_parked(); + + // Now simulate Force pressure to trigger the force click and go-to definition + cx.simulate_event(MousePressureEvent { + pressure: 1.0, + stage: PressureStage::Force, + position: hover_point, + modifiers: Modifiers::none(), + }); + requests.next().await; + cx.background_executor.run_until_parked(); + + // Assert that we navigated to the definition + cx.assert_editor_state(indoc! {" + fn test() { do_work(); } + fn «do_workˇ»() { test(); } + "}); + } } diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 1da3361f53853a5ea5a9d532b9ee2c05d6010a5d..64415005ec61b1ce942e4fbedaabc70919f5e61d 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -17,7 +17,7 @@ use itertools::Itertools; use language::{DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; -use multi_buffer::{ToOffset, ToPoint}; +use multi_buffer::{MultiBufferOffset, ToOffset, ToPoint}; use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart}; use settings::Settings; use std::{borrow::Cow, cell::RefCell}; @@ -106,7 +106,7 @@ pub fn find_hovered_hint_part( hovered_offset: InlayOffset, ) -> Option<(InlayHintLabelPart, Range)> { if hovered_offset >= hint_start { - let mut hovered_character = (hovered_offset - hint_start).0; + let mut hovered_character = hovered_offset - hint_start; let mut part_start = hint_start; for part in label_parts { let part_len = part.value.chars().count(); @@ -151,7 +151,7 @@ pub fn hover_at_inlay( false }) { - hide_hover(editor, cx); + return; } let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; @@ -316,12 +316,12 @@ fn show_hover( } else { snapshot .buffer_snapshot() - .diagnostics_with_buffer_ids_in_range::(offset..offset) + .diagnostics_with_buffer_ids_in_range::(offset..offset) .filter(|(_, diagnostic)| { Some(diagnostic.diagnostic.group_id) != active_group_id }) // Find the entry with the most specific range - .min_by_key(|(_, entry)| entry.range.len()) + .min_by_key(|(_, entry)| entry.range.end - entry.range.start) }; let diagnostic_popover = if let Some((buffer_id, local_diagnostic)) = local_diagnostic { @@ -341,7 +341,13 @@ fn show_hover( renderer .as_ref() .and_then(|renderer| { - renderer.render_hover(group, point_range, buffer_id, cx) + renderer.render_hover( + group, + point_range, + buffer_id, + language_registry.clone(), + cx, + ) }) .context("no rendered diagnostic") })??; @@ -512,7 +518,7 @@ fn show_hover( // Highlight the selected symbol using a background highlight editor.highlight_background::( 
&hover_highlights, - |theme| theme.colors().element_hover, // todo update theme + |_, theme| theme.colors().element_hover, // todo update theme cx, ); } @@ -601,23 +607,30 @@ async fn parse_blocks( pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { let settings = ThemeSettings::get_global(cx); let ui_font_family = settings.ui_font.family.clone(); + let ui_font_features = settings.ui_font.features.clone(); let ui_font_fallbacks = settings.ui_font.fallbacks.clone(); let buffer_font_family = settings.buffer_font.family.clone(); + let buffer_font_features = settings.buffer_font.features.clone(); let buffer_font_fallbacks = settings.buffer_font.fallbacks.clone(); let mut base_text_style = window.text_style(); base_text_style.refine(&TextStyleRefinement { font_family: Some(ui_font_family), + font_features: Some(ui_font_features), font_fallbacks: ui_font_fallbacks, color: Some(cx.theme().colors().editor_foreground), ..Default::default() }); MarkdownStyle { base_text_style, - code_block: StyleRefinement::default().my(rems(1.)).font_buffer(cx), + code_block: StyleRefinement::default() + .my(rems(1.)) + .font_buffer(cx) + .font_features(buffer_font_features.clone()), inline_code: TextStyleRefinement { background_color: Some(cx.theme().colors().background), font_family: Some(buffer_font_family), + font_features: Some(buffer_font_features), font_fallbacks: buffer_font_fallbacks, ..Default::default() }, @@ -643,6 +656,7 @@ pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { .text_base() .mt(rems(1.)) .mb_0(), + table_columns_min_size: true, ..Default::default() } } @@ -651,12 +665,15 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { let settings = ThemeSettings::get_global(cx); let ui_font_family = settings.ui_font.family.clone(); let ui_font_fallbacks = settings.ui_font.fallbacks.clone(); + let ui_font_features = settings.ui_font.features.clone(); let buffer_font_family = settings.buffer_font.family.clone(); + let buffer_font_features = settings.buffer_font.features.clone(); let buffer_font_fallbacks = settings.buffer_font.fallbacks.clone(); let mut base_text_style = window.text_style(); base_text_style.refine(&TextStyleRefinement { font_family: Some(ui_font_family), + font_features: Some(ui_font_features), font_fallbacks: ui_font_fallbacks, color: Some(cx.theme().colors().editor_foreground), ..Default::default() @@ -667,6 +684,7 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { inline_code: TextStyleRefinement { background_color: Some(cx.theme().colors().editor_background.opacity(0.5)), font_family: Some(buffer_font_family), + font_features: Some(buffer_font_features), font_fallbacks: buffer_font_fallbacks, ..Default::default() }, @@ -692,6 +710,7 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { .font_weight(FontWeight::BOLD) .text_base() .mb_0(), + table_columns_min_size: true, ..Default::default() } } @@ -887,7 +906,6 @@ impl InfoPopover { *keyboard_grace = false; cx.stop_propagation(); }) - .p_2() .when_some(self.parsed_content.clone(), |this, markdown| { this.child( div() @@ -903,12 +921,13 @@ impl InfoPopover { copy_button_on_hover: false, border: false, }) - .on_url_click(open_markdown_url), + .on_url_click(open_markdown_url) + .p_2(), ), ) .custom_scrollbars( Scrollbars::for_settings::() - .tracked_scroll_handle(self.scroll_handle.clone()), + .tracked_scroll_handle(&self.scroll_handle), window, cx, ) @@ -986,6 +1005,11 @@ impl DiagnosticPopover { 
self.markdown.clone(), diagnostics_markdown_style(window, cx), ) + .code_block_renderer(markdown::CodeBlockRenderer::Default { + copy_button: false, + copy_button_on_hover: false, + border: false, + }) .on_url_click( move |link, window, cx| { if let Some(renderer) = GlobalDiagnosticRenderer::global(cx) @@ -1001,7 +1025,7 @@ impl DiagnosticPopover { ) .custom_scrollbars( Scrollbars::for_settings::() - .tracked_scroll_handle(self.scroll_handle.clone()), + .tracked_scroll_handle(&self.scroll_handle), window, cx, ), @@ -1622,7 +1646,7 @@ mod tests { } "})[0] .start; - let hint_position = cx.to_lsp(hint_start_offset); + let hint_position = cx.to_lsp(MultiBufferOffset(hint_start_offset)); let new_type_target_range = cx.lsp_range(indoc! {" struct TestStruct; @@ -1697,8 +1721,8 @@ mod tests { .unwrap(); let new_type_hint_part_hover_position = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let previous_valid = inlay_range.start.to_display_point(&snapshot); - let next_valid = inlay_range.end.to_display_point(&snapshot); + let previous_valid = MultiBufferOffset(inlay_range.start).to_display_point(&snapshot); + let next_valid = MultiBufferOffset(inlay_range.end).to_display_point(&snapshot); assert_eq!(previous_valid.row(), next_valid.row()); assert!(previous_valid.column() < next_valid.column()); let exact_unclipped = DisplayPoint::new( @@ -1808,7 +1832,8 @@ mod tests { popover.symbol_range, RangeInEditor::Inlay(InlayHighlight { inlay: InlayId::Hint(0), - inlay_position: buffer_snapshot.anchor_after(inlay_range.start), + inlay_position: buffer_snapshot + .anchor_after(MultiBufferOffset(inlay_range.start)), range: ": ".len()..": ".len() + new_type_label.len(), }), "Popover range should match the new type label part" @@ -1821,8 +1846,8 @@ mod tests { let struct_hint_part_hover_position = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let previous_valid = inlay_range.start.to_display_point(&snapshot); - let next_valid = inlay_range.end.to_display_point(&snapshot); + let previous_valid = MultiBufferOffset(inlay_range.start).to_display_point(&snapshot); + let next_valid = MultiBufferOffset(inlay_range.end).to_display_point(&snapshot); assert_eq!(previous_valid.row(), next_valid.row()); assert!(previous_valid.column() < next_valid.column()); let exact_unclipped = DisplayPoint::new( @@ -1862,7 +1887,8 @@ mod tests { popover.symbol_range, RangeInEditor::Inlay(InlayHighlight { inlay: InlayId::Hint(0), - inlay_position: buffer_snapshot.anchor_after(inlay_range.start), + inlay_position: buffer_snapshot + .anchor_after(MultiBufferOffset(inlay_range.start)), range: ": ".len() + new_type_label.len() + "<".len() ..": ".len() + new_type_label.len() + "<".len() + struct_label.len(), }), diff --git a/crates/editor/src/indent_guides.rs b/crates/editor/src/indent_guides.rs index 7c392d27531472a413ce4d32d09cce4eb722e462..f186f9da77aca5a0d34cdc05272032f93862b1d2 100644 --- a/crates/editor/src/indent_guides.rs +++ b/crates/editor/src/indent_guides.rs @@ -181,6 +181,10 @@ pub fn indent_guides_in_range( .buffer_snapshot() .indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx) .filter(|indent_guide| { + if editor.has_indent_guides_disabled_for_buffer(indent_guide.buffer_id) { + return false; + } + if editor.is_buffer_folded(indent_guide.buffer_id, cx) { return false; } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 
50d4ed8df9871902033bfb4b55d85eed4ace51f1..18bbc56005a8ca01fedc7a5e17ae5ec229f48426 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -1,4 +1,5 @@ use std::{ + collections::hash_map, ops::{ControlFlow, Range}, time::Duration, }; @@ -290,7 +291,7 @@ impl Editor { }), }; - let mut visible_excerpts = self.visible_excerpts(cx); + let mut visible_excerpts = self.visible_excerpts(true, cx); let mut invalidate_hints_for_buffers = HashSet::default(); let ignore_previous_fetches = match reason { InlayHintRefreshReason::ModifiersChanged(_) @@ -583,8 +584,11 @@ impl Editor { }) .max_by_key(|hint| hint.id) { - if let Some(ResolvedHint::Resolved(cached_hint)) = - hovered_hint.position.buffer_id.and_then(|buffer_id| { + if let Some(ResolvedHint::Resolved(cached_hint)) = hovered_hint + .position + .text_anchor + .buffer_id + .and_then(|buffer_id| { lsp_store.update(cx, |lsp_store, cx| { lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx) }) @@ -644,9 +648,9 @@ impl Editor { ) { let highlight_start = - (part_range.start - hint_start).0 + extra_shift_left; + (part_range.start - hint_start) + extra_shift_left; let highlight_end = - (part_range.end - hint_start).0 + extra_shift_right; + (part_range.end - hint_start) + extra_shift_right; let highlight = InlayHighlight { inlay: hovered_hint.id, inlay_position: hovered_hint.position, @@ -756,7 +760,7 @@ impl Editor { let visible_inlay_hint_ids = self .visible_inlay_hints(cx) .iter() - .filter(|inlay| inlay.position.buffer_id == Some(buffer_id)) + .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id)) .map(|inlay| inlay.id) .collect::>(); let Some(inlay_hints) = &mut self.inlay_hints else { @@ -778,6 +782,7 @@ impl Editor { } let excerpts = self.buffer.read(cx).excerpt_ids(); + let mut inserted_hint_text = HashMap::default(); let hints_to_insert = new_hints .into_iter() .filter_map(|(chunk_range, hints_result)| { @@ -804,8 +809,35 @@ impl Editor { } } }) - .flat_map(|hints| hints.into_values()) - .flatten() + .flat_map(|new_hints| { + let mut hints_deduplicated = Vec::new(); + + if new_hints.len() > 1 { + for (server_id, new_hints) in new_hints { + for (new_id, new_hint) in new_hints { + let hints_text_for_position = inserted_hint_text + .entry(new_hint.position) + .or_insert_with(HashMap::default); + let insert = + match hints_text_for_position.entry(new_hint.text().to_string()) { + hash_map::Entry::Occupied(o) => o.get() == &server_id, + hash_map::Entry::Vacant(v) => { + v.insert(server_id); + true + } + }; + + if insert { + hints_deduplicated.push((new_id, new_hint)); + } + } + } + } else { + hints_deduplicated.extend(new_hints.into_values().flatten()); + } + + hints_deduplicated + }) .filter_map(|(hint_id, lsp_hint)| { if inlay_hints.allowed_hint_kinds.contains(&lsp_hint.kind) && inlay_hints @@ -829,9 +861,13 @@ impl Editor { self.visible_inlay_hints(cx) .iter() .filter(|inlay| { - inlay.position.buffer_id.is_none_or(|buffer_id| { - invalidate_hints_for_buffers.contains(&buffer_id) - }) + inlay + .position + .text_anchor + .buffer_id + .is_none_or(|buffer_id| { + invalidate_hints_for_buffers.contains(&buffer_id) + }) }) .map(|inlay| inlay.id), ); @@ -912,14 +948,14 @@ pub mod tests { use crate::{ExcerptRange, scroll::Autoscroll}; use collections::HashSet; use futures::{StreamExt, future}; - use gpui::{AppContext as _, Context, SemanticVersion, TestAppContext, WindowHandle}; + use gpui::{AppContext as _, Context, TestAppContext, WindowHandle}; use itertools::Itertools as _; use 
language::language_settings::InlayHintKind; use language::{Capability, FakeLspAdapter}; use language::{Language, LanguageConfig, LanguageMatcher}; use languages::rust_lang; use lsp::FakeLanguageServer; - use multi_buffer::MultiBuffer; + use multi_buffer::{MultiBuffer, MultiBufferOffset}; use parking_lot::Mutex; use pretty_assertions::assert_eq; use project::{FakeFs, Project}; @@ -1000,7 +1036,7 @@ pub mod tests { editor .update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input("some change", window, cx); }) @@ -1400,7 +1436,7 @@ pub mod tests { rs_editor .update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input("some rs change", window, cx); }) @@ -1432,7 +1468,7 @@ pub mod tests { md_editor .update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input("some md change", window, cx); }) @@ -1880,7 +1916,7 @@ pub mod tests { editor .update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input(change_after_opening, window, cx); }) @@ -1926,7 +1962,7 @@ pub mod tests { task_editor .update(&mut cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([13..13]) + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) }); editor.handle_input(async_later_change, window, cx); }) @@ -2175,7 +2211,7 @@ pub mod tests { cx: &mut gpui::TestAppContext, ) -> Range { let ranges = editor - .update(cx, |editor, _window, cx| editor.visible_excerpts(cx)) + .update(cx, |editor, _window, cx| editor.visible_excerpts(true, cx)) .unwrap(); assert_eq!( ranges.len(), @@ -2677,7 +2713,7 @@ let c = 3;"# let mut editor = Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx); editor.change_selections(SelectionEffects::default(), window, cx, |s| { - s.select_ranges([0..0]) + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) }); editor }); @@ -3732,6 +3768,7 @@ let c = 3;"# let mut fake_servers = language_registry.register_fake_lsp( "Rust", FakeLspAdapter { + name: "rust-analyzer", capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), ..lsp::ServerCapabilities::default() @@ -3804,6 +3841,78 @@ let c = 3;"# }, ); + // Add another server that does send the same, duplicate hints back + let mut fake_servers_2 = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "CrabLang-ls", + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new(move |fake_server| { + fake_server.set_request_handler::( + move |params, _| async move { + if params.text_document.uri + == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() + { + Ok(Some(vec![ + lsp::InlayHint { + position: lsp::Position::new(1, 9), + label: lsp::InlayHintLabel::String(": i32".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + 
tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(19, 9), + label: lsp::InlayHintLabel::String(": i33".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + ])) + } else if params.text_document.uri + == lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap() + { + Ok(Some(vec![ + lsp::InlayHint { + position: lsp::Position::new(1, 10), + label: lsp::InlayHintLabel::String(": i34".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + lsp::InlayHint { + position: lsp::Position::new(29, 10), + label: lsp::InlayHintLabel::String(": i35".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }, + ])) + } else { + panic!("Unexpected file path {:?}", params.text_document.uri); + } + }, + ); + })), + ..FakeLspAdapter::default() + }, + ); + let (buffer_1, _handle_1) = project .update(cx, |project, cx| { project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) @@ -3847,6 +3956,7 @@ let c = 3;"# }); let fake_server = fake_servers.next().await.unwrap(); + let _fake_server_2 = fake_servers_2.next().await.unwrap(); cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); @@ -3855,11 +3965,16 @@ let c = 3;"# assert_eq!( vec![ ": i32".to_string(), + ": i32".to_string(), + ": i33".to_string(), ": i33".to_string(), ": i34".to_string(), + ": i34".to_string(), + ": i35".to_string(), ": i35".to_string(), ], sorted_cached_hint_labels(editor, cx), + "We receive duplicate hints from 2 servers and cache them all" ); assert_eq!( vec![ @@ -3869,7 +3984,7 @@ let c = 3;"# ": i33".to_string(), ], visible_hint_labels(editor, cx), - "lib.rs is added before main.rs , so its excerpts should be visible first" + "lib.rs is added before main.rs , so its excerpts should be visible first; hints should be deduplicated per label" ); }) .unwrap(); @@ -3919,8 +4034,12 @@ let c = 3;"# assert_eq!( vec![ ": i32".to_string(), + ": i32".to_string(), + ": i33".to_string(), ": i33".to_string(), ": i34".to_string(), + ": i34".to_string(), + ": i35".to_string(), ": i35".to_string(), ], sorted_cached_hint_labels(editor, cx), @@ -3950,7 +4069,7 @@ let c = 3;"# let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); theme::init(theme::LoadThemes::JustBase, cx); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); crate::init(cx); }); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 12590e4b3f95648dd653d408252ced460e2e834e..cfbb7c975c844f08d76a5568f1e02dfe3d7d74f1 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -21,8 +21,9 @@ use language::{ SelectionGoal, proto::serialize_anchor as serialize_text_anchor, }; use lsp::DiagnosticSeverity; +use multi_buffer::MultiBufferOffset; use project::{ - Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger, + File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, }; use rpc::proto::{self, update_view}; @@ -454,21 +455,13 @@ async fn update_editor_from_message( })??; // Deserialize the editor state. 
- let (selections, pending_selection, scroll_top_anchor) = this.update(cx, |editor, cx| { - let buffer = editor.buffer.read(cx).read(cx); - let selections = message - .selections - .into_iter() - .filter_map(|selection| deserialize_selection(&buffer, selection)) - .collect::>(); - let pending_selection = message - .pending_selection - .and_then(|selection| deserialize_selection(&buffer, selection)); - let scroll_top_anchor = message - .scroll_top_anchor - .and_then(|anchor| deserialize_anchor(&buffer, anchor)); - anyhow::Ok((selections, pending_selection, scroll_top_anchor)) - })??; + let selections = message + .selections + .into_iter() + .filter_map(deserialize_selection) + .collect::>(); + let pending_selection = message.pending_selection.and_then(deserialize_selection); + let scroll_top_anchor = message.scroll_top_anchor.and_then(deserialize_anchor); // Wait until the buffer has received all of the operations referenced by // the editor's new state. @@ -562,24 +555,20 @@ fn deserialize_excerpt_range( )) } -fn deserialize_selection( - buffer: &MultiBufferSnapshot, - selection: proto::Selection, -) -> Option> { +fn deserialize_selection(selection: proto::Selection) -> Option> { Some(Selection { id: selection.id as usize, - start: deserialize_anchor(buffer, selection.start?)?, - end: deserialize_anchor(buffer, selection.end?)?, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, }) } -fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor) -> Option { +fn deserialize_anchor(anchor: proto::EditorAnchor) -> Option { let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id); Some(Anchor::in_buffer( excerpt_id, - buffer.buffer_id_for_excerpt(excerpt_id)?, language::proto::deserialize_anchor(anchor.anchor?)?, )) } @@ -587,6 +576,21 @@ fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor) impl Item for Editor { type Event = EditorEvent; + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + cx: &'a App, + ) -> Option { + if TypeId::of::() == type_id { + Some(self_handle.clone().into()) + } else if TypeId::of::() == type_id { + Some(self_handle.read(cx).buffer.clone().into()) + } else { + None + } + } + fn navigate( &mut self, data: Box, @@ -629,18 +633,20 @@ impl Item for Editor { } fn tab_tooltip_text(&self, cx: &App) -> Option { - let file_path = self - .buffer() + self.buffer() .read(cx) - .as_singleton()? - .read(cx) - .file() - .and_then(|f| f.as_local())? 
- .abs_path(cx); - - let file_path = file_path.compact().to_string_lossy().into_owned(); - - Some(file_path.into()) + .as_singleton() + .and_then(|buffer| buffer.read(cx).file()) + .and_then(|file| File::from_dyn(Some(file))) + .map(|file| { + file.worktree + .read(cx) + .absolutize(&file.path) + .compact() + .to_string_lossy() + .into_owned() + .into() + }) } fn telemetry_event_text(&self) -> Option<&'static str> { @@ -836,7 +842,6 @@ impl Item for Editor { .map(|handle| handle.read(cx).base_buffer().unwrap_or(handle.clone())) .collect::>(); - // let mut buffers_to_save = let buffers_to_save = if self.buffer.read(cx).is_singleton() && !options.autosave { buffers } else { @@ -923,7 +928,11 @@ impl Item for Editor { }) } - fn as_searchable(&self, handle: &Entity) -> Option> { + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { Some(Box::new(handle.clone())) } @@ -941,7 +950,7 @@ impl Item for Editor { fn breadcrumbs(&self, variant: &Theme, cx: &App) -> Option> { let cursor = self.selections.newest_anchor().head(); - let multibuffer = &self.buffer().read(cx); + let multibuffer = self.buffer().read(cx); let (buffer_id, symbols) = multibuffer .read(cx) .symbols_containing(cursor, Some(variant.syntax()))?; @@ -1356,7 +1365,7 @@ impl ProjectItem for Editor { cx: &mut Context, ) -> Self { let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx); - if let Some((excerpt_id, buffer_id, snapshot)) = + if let Some((excerpt_id, _, snapshot)) = editor.buffer().read(cx).snapshot(cx).as_singleton() && WorkspaceSettings::get(None, cx).restore_on_file_reopen && let Some(restoration_data) = Self::project_item_kind() @@ -1379,11 +1388,8 @@ impl ProjectItem for Editor { }); } let (top_row, offset) = restoration_data.scroll_position; - let anchor = Anchor::in_buffer( - *excerpt_id, - buffer_id, - snapshot.anchor_before(Point::new(top_row, 0)), - ); + let anchor = + Anchor::in_buffer(*excerpt_id, snapshot.anchor_before(Point::new(top_row, 0))); editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx); } @@ -1480,6 +1486,7 @@ impl SearchableItem for Editor { fn update_matches( &mut self, matches: &[Range], + active_match_index: Option, _: &mut Window, cx: &mut Context, ) { @@ -1490,7 +1497,13 @@ impl SearchableItem for Editor { let updated = existing_range != Some(matches); self.highlight_background::( matches, - |theme| theme.colors().search_match_background, + move |index, theme| { + if active_match_index == Some(*index) { + theme.colors().search_active_match_background + } else { + theme.colors().search_match_background + } + }, cx, ); if updated { @@ -1586,12 +1599,11 @@ impl SearchableItem for Editor { &mut self, index: usize, matches: &[Range], - collapse: bool, window: &mut Window, cx: &mut Context, ) { self.unfold_ranges(&[matches[index].clone()], false, true, cx); - let range = self.range_for_match(&matches[index], collapse); + let range = self.range_for_match(&matches[index]); let autoscroll = if EditorSettings::get_global(cx).search.center_on_match { Autoscroll::center() } else { @@ -1736,7 +1748,7 @@ impl SearchableItem for Editor { let mut ranges = Vec::new(); let search_within_ranges = if search_within_ranges.is_empty() { - vec![buffer.anchor_before(0)..buffer.anchor_after(buffer.len())] + vec![buffer.anchor_before(MultiBufferOffset(0))..buffer.anchor_after(buffer.len())] } else { search_within_ranges }; @@ -1747,7 +1759,10 @@ impl SearchableItem for Editor { { ranges.extend( query - .search(search_buffer, Some(search_range.clone())) + 
.search( + search_buffer, + Some(search_range.start.0..search_range.end.0), + ) .await .into_iter() .map(|match_range| { @@ -1763,11 +1778,7 @@ impl SearchableItem for Editor { .anchor_after(search_range.start + match_range.start); let end = search_buffer .anchor_before(search_range.start + match_range.end); - Anchor::range_in_buffer( - excerpt_id, - search_buffer.remote_id(), - start..end, - ) + Anchor::range_in_buffer(excerpt_id, start..end) } }), ); @@ -1886,15 +1897,20 @@ fn path_for_buffer<'a>( cx: &'a App, ) -> Option> { let file = buffer.read(cx).as_singleton()?.read(cx).file()?; - path_for_file(file.as_ref(), height, include_filename, cx) + path_for_file(file, height, include_filename, cx) } fn path_for_file<'a>( - file: &'a dyn language::File, + file: &'a Arc, mut height: usize, include_filename: bool, cx: &'a App, ) -> Option> { + if project::File::from_dyn(Some(file)).is_none() { + return None; + } + + let file = file.as_ref(); // Ensure we always render at least the filename. height += 1; @@ -1934,18 +1950,18 @@ mod tests { use super::*; use fs::MTime; use gpui::{App, VisualTestContext}; - use language::{LanguageMatcher, TestFile}; + use language::TestFile; use project::FakeFs; use std::path::{Path, PathBuf}; use util::{path, rel_path::RelPath}; #[gpui::test] fn test_path_for_file(cx: &mut App) { - let file = TestFile { + let file: Arc = Arc::new(TestFile { path: RelPath::empty().into(), root_name: String::new(), local_root: None, - }; + }); assert_eq!(path_for_file(&file, 0, false, cx), None); } @@ -1974,20 +1990,6 @@ mod tests { .unwrap() } - fn rust_language() -> Arc { - Arc::new(language::Language::new( - language::LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - )) - } - #[gpui::test] async fn test_deserialize(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); @@ -2069,7 +2071,9 @@ mod tests { { let project = Project::test(fs.clone(), [path!("/file.rs").as_ref()], cx).await; // Add Rust to the language, so that we can restore the language of the buffer - project.read_with(cx, |project, _| project.languages().add(rust_language())); + project.read_with(cx, |project, _| { + project.languages().add(languages::rust_lang()) + }); let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index 0e32bc686ad98a45b83712841c13fffc07421acb..1d808c968d579569fb595a5a1a0ddaa4dbc718b3 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -1,7 +1,7 @@ use anyhow::{Context as _, Result, anyhow}; use collections::HashMap; use gpui::{Context, Entity, Window}; -use multi_buffer::{MultiBuffer, ToOffset}; +use multi_buffer::{BufferOffset, MultiBuffer, ToOffset}; use std::ops::Range; use util::ResultExt as _; @@ -19,7 +19,7 @@ pub struct JsxTagCompletionState { /// that corresponds to the tag name /// Note that this is not configurable, i.e. we assume the first /// named child of a tag node is the tag name -const TS_NODE_TAG_NAME_CHILD_INDEX: usize = 0; +const TS_NODE_TAG_NAME_CHILD_INDEX: u32 = 0; /// Maximum number of parent elements to walk back when checking if an open tag /// is already closed. 
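// The hunk below switches the selection-map key from bare `usize` offsets to the
// `BufferOffset` newtype. A minimal, std-only sketch of that typed-offset pattern;
// the type name here is illustrative, not Zed's:
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
struct ExampleOffset(usize);

impl std::ops::AddAssign<usize> for ExampleOffset {
    // Advancing by a byte length stays within the same coordinate space.
    fn add_assign(&mut self, rhs: usize) {
        self.0 += rhs;
    }
}
// Mixing coordinate spaces now fails to compile rather than silently misindexing:
// comparing `ExampleOffset(4)` against a plain `4usize` is a type error.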
@@ -546,9 +546,10 @@ pub(crate) fn handle_from( if edit_range_offset.start != edit_range_offset.end { continue; } - if let Some(selection) = - buffer_selection_map.get_mut(&(edit_range_offset.start, edit_range_offset.end)) - { + if let Some(selection) = buffer_selection_map.get_mut(&( + BufferOffset(edit_range_offset.start), + BufferOffset(edit_range_offset.end), + )) { if selection.0.head().bias() != text::Bias::Right || selection.0.tail().bias() != text::Bias::Right { @@ -621,7 +622,7 @@ mod jsx_tag_autoclose_tests { use super::*; use gpui::{AppContext as _, TestAppContext}; use languages::language; - use multi_buffer::ExcerptRange; + use multi_buffer::{ExcerptRange, MultiBufferOffset}; use text::Selection; async fn test_setup(cx: &mut TestAppContext) -> EditorTestContext { @@ -842,9 +843,9 @@ mod jsx_tag_autoclose_tests { cx.update_editor(|editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { selections.select(vec![ - Selection::from_offset(4), - Selection::from_offset(9), - Selection::from_offset(15), + Selection::from_offset(MultiBufferOffset(4)), + Selection::from_offset(MultiBufferOffset(9)), + Selection::from_offset(MultiBufferOffset(15)), ]) }) }); diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index ab16fe7eb4bce28ef6bfee2c2bde1d52fda86561..ff3096961d646a2a98458319d927a4e2723d0602 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -1,6 +1,7 @@ use collections::HashMap; use gpui::{AppContext, Context, Window}; use itertools::Itertools; +use multi_buffer::MultiBufferOffset; use std::{ops::Range, time::Duration}; use text::{AnchorRangeExt, BufferId, ToPoint}; use util::ResultExt; @@ -60,15 +61,17 @@ pub(super) fn refresh_linked_ranges( editor .update(cx, |editor, cx| { let display_snapshot = editor.display_snapshot(cx); - let selections = editor.selections.all::(&display_snapshot); + let selections = editor + .selections + .all::(&display_snapshot); let snapshot = display_snapshot.buffer_snapshot(); let buffer = editor.buffer.read(cx); for selection in selections { let cursor_position = selection.head(); let start_position = snapshot.anchor_before(cursor_position); let end_position = snapshot.anchor_after(selection.tail()); - if start_position.buffer_id != end_position.buffer_id - || end_position.buffer_id.is_none() + if start_position.text_anchor.buffer_id != end_position.text_anchor.buffer_id + || end_position.text_anchor.buffer_id.is_none() { // Throw away selections spanning multiple buffers. 
continue; diff --git a/crates/editor/src/lsp_colors.rs b/crates/editor/src/lsp_colors.rs index 050363f219ee5579a73cf168cce82778df8810ab..2a98ad6bd471d7a2745fa30b0e575b2a6db157d0 100644 --- a/crates/editor/src/lsp_colors.rs +++ b/crates/editor/src/lsp_colors.rs @@ -164,7 +164,7 @@ impl Editor { } let visible_buffers = self - .visible_excerpts(cx) + .visible_excerpts(true, cx) .into_values() .map(|(buffer, ..)| buffer) .filter(|editor_buffer| { diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index 36353e8d42527cd59043ab3cf2b6105c534412d9..37cc734ab1ef0a0b677b3e405ff70b461d349a1c 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -37,7 +37,7 @@ where .selections .disjoint_anchors_arc() .iter() - .filter_map(|selection| Some((selection.head(), selection.head().buffer_id?))) + .filter_map(|selection| Some((selection.head(), selection.head().text_anchor.buffer_id?))) .unique_by(|(_, buffer_id)| *buffer_id) .find_map(|(trigger_anchor, buffer_id)| { let buffer = editor.buffer().read(cx).buffer(buffer_id)?; diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 2a63e39adda52734b301eda0d32a5bfa10a8e47e..7314991bd5e4842f395383888a87b4e2db7e0a0c 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -59,7 +59,7 @@ impl MouseContextMenu { x: editor.gutter_dimensions.width, y: Pixels::ZERO, }; - let source_position = editor.to_pixel_point(source, &editor_snapshot, window)?; + let source_position = editor.to_pixel_point(source, &editor_snapshot, window, cx)?; let menu_position = MenuPosition::PinnedToEditor { source, offset: position - (source_position + content_origin), @@ -81,14 +81,26 @@ impl MouseContextMenu { cx: &mut Context, ) -> Self { let context_menu_focus = context_menu.focus_handle(cx); - window.focus(&context_menu_focus); + + // Since `ContextMenu` is rendered in a deferred fashion its focus + // handle is not linked to the Editor's until after the deferred draw + // callback runs. + // We need to wait for that to happen before focusing it, so that + // calling `contains_focused` on the editor's focus handle returns + // `true` when the `ContextMenu` is focused. 
+ let focus_handle = context_menu_focus.clone(); + cx.on_next_frame(window, move |_, window, cx| { + cx.on_next_frame(window, move |_, window, cx| { + window.focus(&focus_handle, cx); + }); + }); let _dismiss_subscription = cx.subscribe_in(&context_menu, window, { let context_menu_focus = context_menu_focus.clone(); move |editor, _, _event: &DismissEvent, window, cx| { editor.mouse_context_menu.take(); if context_menu_focus.contains_focused(window, cx) { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); } } }); @@ -115,7 +127,7 @@ impl MouseContextMenu { } editor.mouse_context_menu.take(); if context_menu_focus.contains_focused(window, cx) { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); } }, ); @@ -149,7 +161,7 @@ pub fn deploy_context_menu( cx: &mut Context, ) { if !editor.is_focused(window) { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); } // Don't show context menu for inline editors @@ -223,7 +235,10 @@ pub fn deploy_context_menu( .action("Go to Declaration", Box::new(GoToDeclaration)) .action("Go to Type Definition", Box::new(GoToTypeDefinition)) .action("Go to Implementation", Box::new(GoToImplementation)) - .action("Find All References", Box::new(FindAllReferences)) + .action( + "Find All References", + Box::new(FindAllReferences::default()), + ) .separator() .action("Rename Symbol", Box::new(Rename)) .action("Format Buffer", Box::new(Format)) @@ -264,6 +279,11 @@ pub fn deploy_context_menu( !has_git_repo, "Copy Permalink", Box::new(CopyPermalinkToLine), + ) + .action_disabled_when( + !has_git_repo, + "View File History", + Box::new(git::FileHistory), ); match focus { Some(focus) => builder.context(focus), @@ -329,8 +349,18 @@ mod tests { } "}); cx.editor(|editor, _window, _app| assert!(editor.mouse_context_menu.is_none())); + cx.update_editor(|editor, window, cx| { - deploy_context_menu(editor, Some(Default::default()), point, window, cx) + deploy_context_menu(editor, Some(Default::default()), point, window, cx); + + // Assert that, even after deploying the editor's mouse context + // menu, the editor's focus handle still contains the focused + // element. The pane's tab bar relies on this to determine whether + // to show the tab bar buttons and there was a small flicker when + // deploying the mouse context menu that would cause this to not be + // true, making it so that the buttons would disappear for a couple + // of frames. + assert!(editor.focus_handle.contains_focused(window, cx)); }); cx.assert_editor_state(indoc! 
{" diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index a83545aaf26b0e148345c185f4f39910e97a727e..8635d89ed13e77d260307667740bf79ab4022e6f 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -8,7 +8,7 @@ use crate::{ }; use gpui::{Pixels, WindowTextSystem}; use language::{CharClassifier, Point}; -use multi_buffer::{MultiBufferRow, MultiBufferSnapshot}; +use multi_buffer::{MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot}; use serde::Deserialize; use workspace::searchable::Direction; @@ -358,28 +358,28 @@ pub fn adjust_greedy_deletion( let mut whitespace_sequences = Vec::new(); let mut current_offset = trimmed_delete_range.start; - let mut whitespace_sequence_length = 0; - let mut whitespace_sequence_start = 0; + let mut whitespace_sequence_length = MultiBufferOffset(0); + let mut whitespace_sequence_start = MultiBufferOffset(0); for ch in map .buffer_snapshot() .text_for_range(trimmed_delete_range.clone()) .flat_map(str::chars) { if ch.is_whitespace() { - if whitespace_sequence_length == 0 { + if whitespace_sequence_length == MultiBufferOffset(0) { whitespace_sequence_start = current_offset; } whitespace_sequence_length += 1; } else { - if whitespace_sequence_length >= 2 { + if whitespace_sequence_length >= MultiBufferOffset(2) { whitespace_sequences.push((whitespace_sequence_start, current_offset)); } - whitespace_sequence_start = 0; - whitespace_sequence_length = 0; + whitespace_sequence_start = MultiBufferOffset(0); + whitespace_sequence_length = MultiBufferOffset(0); } current_offset += ch.len_utf8(); } - if whitespace_sequence_length >= 2 { + if whitespace_sequence_length >= MultiBufferOffset(2) { whitespace_sequences.push((whitespace_sequence_start, current_offset)); } @@ -731,7 +731,7 @@ pub fn find_preceding_boundary_trail( } let trail = trail_offset - .map(|trail_offset: usize| map.clip_point(trail_offset.to_display_point(map), Bias::Left)); + .map(|trail_offset| map.clip_point(trail_offset.to_display_point(map), Bias::Left)); ( trail, @@ -779,7 +779,7 @@ pub fn find_boundary_trail( } let trail = trail_offset - .map(|trail_offset: usize| map.clip_point(trail_offset.to_display_point(map), Bias::Right)); + .map(|trail_offset| map.clip_point(trail_offset.to_display_point(map), Bias::Right)); ( trail, @@ -810,8 +810,8 @@ pub fn find_boundary_exclusive( /// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer. pub fn chars_after( map: &DisplaySnapshot, - mut offset: usize, -) -> impl Iterator)> + '_ { + mut offset: MultiBufferOffset, +) -> impl Iterator)> + '_ { map.buffer_snapshot().chars_at(offset).map(move |ch| { let before = offset; offset += ch.len_utf8(); @@ -824,8 +824,8 @@ pub fn chars_after( /// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer. 
pub fn chars_before( map: &DisplaySnapshot, - mut offset: usize, -) -> impl Iterator)> + '_ { + mut offset: MultiBufferOffset, +) -> impl Iterator)> + '_ { map.buffer_snapshot() .reversed_chars_at(offset) .map(move |ch| { @@ -1018,8 +1018,9 @@ mod tests { // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary let mut id = 0; - let inlays = (0..buffer_snapshot.len()) + let inlays = (0..buffer_snapshot.len().0) .flat_map(|offset| { + let offset = MultiBufferOffset(offset); [ Inlay::edit_prediction( post_inc(&mut id), @@ -1058,7 +1059,7 @@ mod tests { ), snapshot .buffer_snapshot() - .offset_to_point(5) + .offset_to_point(MultiBufferOffset(5)) .to_display_point(&snapshot), "Should not stop at inlays when looking for boundaries" ); diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index ffa0c017c0eb157df776cc49e0dba51e617e3379..f548db75ad5d8cfe32a59a798b6d23931c34f215 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -322,7 +322,11 @@ fn cancel_flycheck_action( .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection.start.buffer_id.or(selection.end.buffer_id)?; + let buffer_id = selection + .start + .text_anchor + .buffer_id + .or(selection.end.text_anchor.buffer_id)?; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? @@ -347,7 +351,11 @@ fn run_flycheck_action( .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection.start.buffer_id.or(selection.end.buffer_id)?; + let buffer_id = selection + .start + .text_anchor + .buffer_id + .or(selection.end.text_anchor.buffer_id)?; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? @@ -372,7 +380,11 @@ fn clear_flycheck_action( .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection.start.buffer_id.or(selection.end.buffer_id)?; + let buffer_id = selection + .start + .text_anchor + .buffer_id + .or(selection.end.text_anchor.buffer_id)?; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index d98dc89b6b0f1a5ab0ebd9a910db0fcb0db1f18c..422be9a54e7cfcc40484e4093eeab6c94ce7d8ee 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -251,7 +251,11 @@ impl ScrollManager { Bias::Left, ) .to_point(map); - let top_anchor = map.buffer_snapshot().anchor_after(scroll_top_buffer_point); + // Anchor the scroll position to the *left* of the first visible buffer point. + // + // This prevents the viewport from shifting down when blocks (e.g. expanded diff hunk + // deletions) are inserted *above* the first buffer character in the file. 
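// In anchor terms, the left bias of `anchor_before` is what keeps the scroll top
// pinned: content that lands exactly at the anchored position resolves after a
// left-biased anchor, not before it. A simplified, std-only model of that rule,
// treating the inserted block as content added at the anchor position purely for
// illustration (this is not Zed's actual Anchor type):
fn resolve_after_insert(anchor: usize, insert_at: usize, insert_len: usize, left_bias: bool) -> usize {
    if anchor < insert_at || (anchor == insert_at && left_bias) {
        // The anchor stays put, so the viewport does not shift down.
        anchor
    } else {
        // A right-biased anchor at the insertion point is pushed past the new content.
        anchor + insert_len
    }
}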
+ let top_anchor = map.buffer_snapshot().anchor_before(scroll_top_buffer_point); self.set_anchor( ScrollAnchor { @@ -500,6 +504,7 @@ impl Editor { editor.register_visible_buffers(cx); editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); editor.update_lsp_data(None, window, cx); + editor.colorize_brackets(false, cx); }) .ok(); }); diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 3b2ed55df724485ee72e6afbc02c7111817869fb..5a1c849b2438fe987b24481b824375e188468916 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -71,14 +71,20 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + let display_snapshot = self.display_snapshot(cx); let scroll_margin_rows = self.vertical_scroll_margin() as u32; let new_screen_top = self .selections - .newest_display(&self.display_snapshot(cx)) + .newest_display(&display_snapshot) .head() .row() .0; - let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows); + let header_offset = display_snapshot + .buffer_snapshot() + .show_headers() + .then(|| display_snapshot.buffer_header_height()) + .unwrap_or(0); + let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows + header_offset); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); } diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 75fffdc7fea17fe35f9942125499ba15c9a77422..54bb7ceec1d035fbefb0c229c4e537e8277b67cd 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -1,18 +1,18 @@ use std::{ cmp, fmt, iter, mem, - ops::{Deref, DerefMut, Range, Sub}, + ops::{AddAssign, Deref, DerefMut, Range, Sub}, sync::Arc, }; use collections::HashMap; use gpui::Pixels; use itertools::Itertools as _; -use language::{Bias, Point, Selection, SelectionGoal, TextDimension}; +use language::{Bias, Point, Selection, SelectionGoal}; +use multi_buffer::{MultiBufferDimension, MultiBufferOffset}; use util::post_inc; use crate::{ Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset, - ToPoint, display_map::{DisplaySnapshot, ToDisplayPoint}, movement::TextLayoutDetails, }; @@ -97,7 +97,7 @@ impl SelectionsCollection { if self.pending.is_none() { self.disjoint_anchors_arc() } else { - let all_offset_selections = self.all::(snapshot); + let all_offset_selections = self.all::(snapshot); all_offset_selections .into_iter() .map(|selection| selection_to_anchor_selection(selection, snapshot)) @@ -113,10 +113,10 @@ impl SelectionsCollection { self.pending.as_mut().map(|pending| &mut pending.selection) } - pub fn pending>( - &self, - snapshot: &DisplaySnapshot, - ) -> Option> { + pub fn pending(&self, snapshot: &DisplaySnapshot) -> Option> + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { resolve_selections_wrapping_blocks(self.pending_anchor(), &snapshot).next() } @@ -124,9 +124,9 @@ impl SelectionsCollection { self.pending.as_ref().map(|pending| pending.mode.clone()) } - pub fn all<'a, D>(&self, snapshot: &DisplaySnapshot) -> Vec> + pub fn all(&self, snapshot: &DisplaySnapshot) -> Vec> where - D: 'a + TextDimension + Ord + Sub, + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, { let disjoint_anchors = &self.disjoint; let mut disjoint = @@ -136,7 +136,13 @@ impl SelectionsCollection { iter::from_fn(move || { if let Some(pending) = pending_opt.as_mut() { while let Some(next_selection) = disjoint.peek() { - if pending.start <= 
next_selection.end && pending.end >= next_selection.start { + if should_merge( + pending.start, + pending.end, + next_selection.start, + next_selection.end, + false, + ) { let next_selection = disjoint.next().unwrap(); if next_selection.start < pending.start { pending.start = next_selection.start; @@ -204,13 +210,13 @@ impl SelectionsCollection { } } - pub fn disjoint_in_range<'a, D>( + pub fn disjoint_in_range( &self, range: Range, snapshot: &DisplaySnapshot, ) -> Vec> where - D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord + std::fmt::Debug, { let start_ix = match self .disjoint @@ -236,7 +242,13 @@ impl SelectionsCollection { iter::from_fn(move || { if let Some(pending) = pending_opt.as_mut() { while let Some(next_selection) = disjoint.peek() { - if pending.start <= next_selection.end && pending.end >= next_selection.start { + if should_merge( + pending.start, + pending.end, + next_selection.start, + next_selection.end, + false, + ) { let next_selection = disjoint.next().unwrap(); if next_selection.start < pending.start { pending.start = next_selection.start; @@ -267,10 +279,10 @@ impl SelectionsCollection { .unwrap() } - pub fn newest>( - &self, - snapshot: &DisplaySnapshot, - ) -> Selection { + pub fn newest(&self, snapshot: &DisplaySnapshot) -> Selection + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { resolve_selections_wrapping_blocks([self.newest_anchor()], &snapshot) .next() .unwrap() @@ -290,10 +302,10 @@ impl SelectionsCollection { .unwrap() } - pub fn oldest>( - &self, - snapshot: &DisplaySnapshot, - ) -> Selection { + pub fn oldest(&self, snapshot: &DisplaySnapshot) -> Selection + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { resolve_selections_wrapping_blocks([self.oldest_anchor()], &snapshot) .next() .unwrap() @@ -306,27 +318,27 @@ impl SelectionsCollection { .unwrap_or_else(|| self.disjoint.first().cloned().unwrap()) } - pub fn first>( - &self, - snapshot: &DisplaySnapshot, - ) -> Selection { + pub fn first(&self, snapshot: &DisplaySnapshot) -> Selection + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { self.all(snapshot).first().unwrap().clone() } - pub fn last>( - &self, - snapshot: &DisplaySnapshot, - ) -> Selection { + pub fn last(&self, snapshot: &DisplaySnapshot) -> Selection + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { self.all(snapshot).last().unwrap().clone() } /// Returns a list of (potentially backwards!) ranges representing the selections. /// Useful for test assertions, but prefer `.all()` instead. 
#[cfg(any(test, feature = "test-support"))] - pub fn ranges>( - &self, - snapshot: &DisplaySnapshot, - ) -> Vec> { + pub fn ranges(&self, snapshot: &DisplaySnapshot) -> Vec> + where + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, + { self.all::(snapshot) .iter() .map(|s| { @@ -372,7 +384,7 @@ impl SelectionsCollection { let is_empty = positions.start == positions.end; let line_len = display_map.line_len(row); let line = display_map.layout_row(row, text_layout_details); - let start_col = line.index_for_x(positions.start) as u32; + let start_col = line.closest_index_for_x(positions.start) as u32; let (start, end) = if is_empty { let point = DisplayPoint::new(row, std::cmp::min(start_col, line_len)); @@ -382,7 +394,7 @@ impl SelectionsCollection { return None; } let start = DisplayPoint::new(row, start_col); - let end_col = line.index_for_x(positions.end) as u32; + let end_col = line.closest_index_for_x(positions.end) as u32; let end = DisplayPoint::new(row, end_col); (start, end) }; @@ -415,6 +427,37 @@ impl SelectionsCollection { !mutable_collection.disjoint.is_empty() || mutable_collection.pending.is_some(), "There must be at least one selection" ); + if cfg!(debug_assertions) { + mutable_collection.disjoint.iter().for_each(|selection| { + assert!( + snapshot.can_resolve(&selection.start), + "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}, {excerpt:?}", + excerpt = snapshot.buffer_for_excerpt(selection.start.excerpt_id).map(|snapshot| snapshot.remote_id()), + ); + assert!( + snapshot.can_resolve(&selection.end), + "disjoint selection end is not resolvable for the given snapshot: {selection:?}, {excerpt:?}", + excerpt = snapshot.buffer_for_excerpt(selection.end.excerpt_id).map(|snapshot| snapshot.remote_id()), + ); + }); + if let Some(pending) = &mutable_collection.pending { + let selection = &pending.selection; + assert!( + snapshot.can_resolve(&selection.start), + "pending selection start is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", + excerpt = snapshot + .buffer_for_excerpt(selection.start.excerpt_id) + .map(|snapshot| snapshot.remote_id()), + ); + assert!( + snapshot.can_resolve(&selection.end), + "pending selection end is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", + excerpt = snapshot + .buffer_for_excerpt(selection.end.excerpt_id) + .map(|snapshot| snapshot.remote_id()), + ); + } + } (mutable_collection.selections_changed, result) } @@ -509,11 +552,18 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { }; if filtered_selections.is_empty() { - let default_anchor = self.snapshot.anchor_before(0); + let buffer_snapshot = self.snapshot.buffer_snapshot(); + let anchor = buffer_snapshot + .excerpts() + .find(|(_, buffer, _)| buffer.remote_id() == buffer_id) + .and_then(|(excerpt_id, _, range)| { + buffer_snapshot.anchor_in_excerpt(excerpt_id, range.context.start) + }) + .unwrap_or_else(|| self.snapshot.anchor_before(MultiBufferOffset(0))); self.collection.disjoint = Arc::from([Selection { id: post_inc(&mut self.collection.next_selection_id), - start: default_anchor, - end: default_anchor, + start: anchor, + end: anchor, reversed: false, goal: SelectionGoal::None, }]); @@ -590,7 +640,7 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { pub fn insert_range(&mut self, range: Range) where - T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub + std::marker::Copy, + T: ToOffset, { let display_map = self.display_snapshot(); let mut selections = self.collection.all(&display_map); @@ 
-628,10 +678,13 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { }) .collect::>(); selections.sort_unstable_by_key(|s| s.start); - // Merge overlapping selections. + let mut i = 1; while i < selections.len() { - if selections[i].start <= selections[i - 1].end { + let prev = &selections[i - 1]; + let current = &selections[i]; + + if should_merge(prev.start, prev.end, current.start, current.end, true) { let removed = selections.remove(i); if removed.start < selections[i - 1].start { selections[i - 1].start = removed.start; @@ -656,7 +709,8 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { pub fn select_anchors(&mut self, selections: Vec>) { let map = self.display_snapshot(); let resolved_selections = - resolve_selections_wrapping_blocks::(&selections, &map).collect::>(); + resolve_selections_wrapping_blocks::(&selections, &map) + .collect::>(); self.select(resolved_selections); } @@ -673,7 +727,7 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { fn select_offset_ranges(&mut self, ranges: I) where - I: IntoIterator>, + I: IntoIterator>, { let selections = ranges .into_iter() @@ -808,13 +862,13 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { pub fn move_offsets_with( &mut self, - mut move_selection: impl FnMut(&MultiBufferSnapshot, &mut Selection), + mut move_selection: impl FnMut(&MultiBufferSnapshot, &mut Selection), ) { let mut changed = false; let display_map = self.display_snapshot(); let selections = self .collection - .all::(&display_map) + .all::(&display_map) .into_iter() .map(|selection| { let mut moved_selection = selection.clone(); @@ -938,7 +992,7 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { let map = self.display_snapshot(); let resolved_selections = resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect(); - self.select::(resolved_selections); + self.select::(resolved_selections); } if let Some(pending) = pending.as_mut() { @@ -981,7 +1035,7 @@ impl DerefMut for MutableSelectionsCollection<'_, '_> { } fn selection_to_anchor_selection( - selection: Selection, + selection: Selection, buffer: &MultiBufferSnapshot, ) -> Selection { let end_bias = if selection.start == selection.end { @@ -1054,7 +1108,7 @@ fn resolve_selections_display<'a>( coalesce_selections(selections) } -/// Resolves the passed in anchors to [`TextDimension`]s `D` +/// Resolves the passed in anchors to [`MultiBufferDimension`]s `D` /// wrapping around blocks inbetween. /// /// # Panics @@ -1065,7 +1119,7 @@ pub(crate) fn resolve_selections_wrapping_blocks<'a, D, I>( map: &'a DisplaySnapshot, ) -> impl 'a + Iterator> where - D: TextDimension + Ord + Sub, + D: MultiBufferDimension + Sub + AddAssign<::Output> + Ord, I: 'a + IntoIterator>, { // Transforms `Anchor -> DisplayPoint -> Point -> DisplayPoint -> D` @@ -1100,7 +1154,13 @@ fn coalesce_selections( iter::from_fn(move || { let mut selection = selections.next()?; while let Some(next_selection) = selections.peek() { - if selection.end >= next_selection.start { + if should_merge( + selection.start, + selection.end, + next_selection.start, + next_selection.end, + true, + ) { if selection.reversed == next_selection.reversed { selection.end = cmp::max(selection.end, next_selection.end); selections.next(); @@ -1122,3 +1182,35 @@ fn coalesce_selections( Some(selection) }) } + +/// Determines whether two selections should be merged into one. +/// +/// Two selections should be merged when: +/// 1. They overlap: the selections share at least one position +/// 2. 
They have the same start position: one contains or equals the other +/// 3. A cursor touches a selection boundary: a zero-width selection (cursor) at the +/// start or end of another selection should be absorbed into it +/// +/// Note: two selections that merely touch (one ends exactly where the other begins) +/// but don't share any positions remain separate, see: https://github.com/zed-industries/zed/issues/24748 +fn should_merge(a_start: T, a_end: T, b_start: T, b_end: T, sorted: bool) -> bool { + let is_overlapping = if sorted { + // When sorted, `a` starts before or at `b`, so overlap means `b` starts before `a` ends + b_start < a_end + } else { + a_start < b_end && b_start < a_end + }; + + // Selections starting at the same position should always merge (one contains the other) + let same_start = a_start == b_start; + + // A cursor (zero-width selection) touching another selection's boundary should merge. + // This handles cases like a cursor at position X merging with a selection that + // starts or ends at X. + let is_cursor_a = a_start == a_end; + let is_cursor_b = b_start == b_end; + let cursor_at_boundary = (is_cursor_a && (a_start == b_start || a_end == b_end)) + || (is_cursor_b && (b_start == a_start || b_end == a_end)); + + is_overlapping || same_start || cursor_at_boundary +} diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 8d74638e4c2aaf356ffabdeef717b9b105487ee3..2554db2450103709275b3f7946076fd891326d84 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -1,13 +1,13 @@ use crate::actions::ShowSignatureHelp; use crate::hover_popover::open_markdown_url; -use crate::{Editor, EditorSettings, ToggleAutoSignatureHelp, hover_markdown_style}; +use crate::{BufferOffset, Editor, EditorSettings, ToggleAutoSignatureHelp, hover_markdown_style}; use gpui::{ App, Context, Entity, HighlightStyle, MouseButton, ScrollHandle, Size, StyledText, Task, TextStyle, Window, combine_highlights, }; use language::BufferSnapshot; use markdown::{Markdown, MarkdownElement}; -use multi_buffer::{Anchor, ToOffset}; +use multi_buffer::{Anchor, MultiBufferOffset, ToOffset}; use settings::Settings; use std::ops::Range; use text::Rope; @@ -82,7 +82,9 @@ impl Editor { if !(self.signature_help_state.is_shown() || self.auto_signature_help_enabled(cx)) { return false; } - let newest_selection = self.selections.newest::(&self.display_snapshot(cx)); + let newest_selection = self + .selections + .newest::(&self.display_snapshot(cx)); let head = newest_selection.head(); if !newest_selection.is_empty() && head != newest_selection.tail() { @@ -92,14 +94,14 @@ impl Editor { } let buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let bracket_range = |position: usize| match (position, position + 1) { - (0, b) if b <= buffer_snapshot.len() => 0..b, - (0, b) => 0..b - 1, + let bracket_range = |position: MultiBufferOffset| match (position, position + 1usize) { + (MultiBufferOffset(0), b) if b <= buffer_snapshot.len() => MultiBufferOffset(0)..b, + (MultiBufferOffset(0), b) => MultiBufferOffset(0)..b - 1, (a, b) if b <= buffer_snapshot.len() => a - 1..b, (a, b) => a - 1..b - 1, }; let not_quote_like_brackets = - |buffer: &BufferSnapshot, start: Range, end: Range| { + |buffer: &BufferSnapshot, start: Range, end: Range| { let text_start = buffer.text_for_range(start).collect::(); let text_end = buffer.text_for_range(end).collect::(); QUOTE_PAIRS @@ -389,7 +391,7 @@ impl SignatureHelpPopover { ) }), ) - 
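A few concrete cases for the `should_merge` rules introduced above, using plain integer offsets in place of the generic position type (an illustration only, not part of the change itself):

fn should_merge_examples() {
    // 1. Overlapping selections share at least one position and merge.
    assert!(should_merge(0, 5, 3, 8, true));
    // Selections that merely touch end-to-start stay separate (see issue 24748).
    assert!(!should_merge(0, 5, 5, 8, true));
    // 3. A cursor (5..5) sitting on another selection's end is absorbed into it.
    assert!(should_merge(0, 5, 5, 5, true));
    // 2. Two selections with the same start always merge, sorted or not.
    assert!(should_merge(3, 3, 3, 8, false));
}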
.vertical_scrollbar_for(self.scroll_handle.clone(), window, cx); + .vertical_scrollbar_for(&self.scroll_handle, window, cx); let controls = if self.signatures.len() > 1 { let prev_button = IconButton::new("signature_help_prev", IconName::ChevronUp) diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs new file mode 100644 index 0000000000000000000000000000000000000000..b5090f06dc1e68d609413db31112775e56559689 --- /dev/null +++ b/crates/editor/src/split.rs @@ -0,0 +1,267 @@ +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use gpui::{ + Action, AppContext as _, Entity, EventEmitter, Focusable, NoAction, Subscription, WeakEntity, +}; +use multi_buffer::{MultiBuffer, MultiBufferFilterMode}; +use project::Project; +use ui::{ + App, Context, InteractiveElement as _, IntoElement as _, ParentElement as _, Render, + Styled as _, Window, div, +}; +use workspace::{ + ActivePaneDecorator, Item, ItemHandle, Pane, PaneGroup, SplitDirection, Workspace, +}; + +use crate::{Editor, EditorEvent}; + +struct SplitDiffFeatureFlag; + +impl FeatureFlag for SplitDiffFeatureFlag { + const NAME: &'static str = "split-diff"; + + fn enabled_for_staff() -> bool { + true + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Action, Default)] +#[action(namespace = editor)] +struct SplitDiff; + +#[derive(Clone, Copy, PartialEq, Eq, Action, Default)] +#[action(namespace = editor)] +struct UnsplitDiff; + +pub struct SplittableEditor { + primary_editor: Entity, + secondary: Option, + panes: PaneGroup, + workspace: WeakEntity, + _subscriptions: Vec, +} + +struct SecondaryEditor { + editor: Entity, + pane: Entity, + has_latest_selection: bool, + _subscriptions: Vec, +} + +impl SplittableEditor { + pub fn primary_editor(&self) -> &Entity { + &self.primary_editor + } + + pub fn last_selected_editor(&self) -> &Entity { + if let Some(secondary) = &self.secondary + && secondary.has_latest_selection + { + &secondary.editor + } else { + &self.primary_editor + } + } + + pub fn new_unsplit( + buffer: Entity, + project: Entity, + workspace: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let primary_editor = + cx.new(|cx| Editor::for_multibuffer(buffer, Some(project.clone()), window, cx)); + let pane = cx.new(|cx| { + let mut pane = Pane::new( + workspace.downgrade(), + project, + Default::default(), + None, + NoAction.boxed_clone(), + true, + window, + cx, + ); + pane.set_should_display_tab_bar(|_, _| false); + pane.add_item(primary_editor.boxed_clone(), true, true, None, window, cx); + pane + }); + let panes = PaneGroup::new(pane); + // TODO(split-diff) we might want to tag editor events with whether they came from primary/secondary + let subscriptions = + vec![ + cx.subscribe(&primary_editor, |this, _, event: &EditorEvent, cx| { + if let EditorEvent::SelectionsChanged { .. 
} = event + && let Some(secondary) = &mut this.secondary + { + secondary.has_latest_selection = false; + } + cx.emit(event.clone()) + }), + ]; + + window.defer(cx, { + let workspace = workspace.downgrade(); + let primary_editor = primary_editor.downgrade(); + move |window, cx| { + workspace + .update(cx, |workspace, cx| { + primary_editor.update(cx, |editor, cx| { + editor.added_to_workspace(workspace, window, cx); + }) + }) + .ok(); + } + }); + Self { + primary_editor, + secondary: None, + panes, + workspace: workspace.downgrade(), + _subscriptions: subscriptions, + } + } + + fn split(&mut self, _: &SplitDiff, window: &mut Window, cx: &mut Context) { + if !cx.has_flag::() { + return; + } + if self.secondary.is_some() { + return; + } + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let project = workspace.read(cx).project().clone(); + let follower = self.primary_editor.update(cx, |primary, cx| { + primary.buffer().update(cx, |buffer, cx| { + let follower = buffer.get_or_create_follower(cx); + buffer.set_filter_mode(Some(MultiBufferFilterMode::KeepInsertions)); + follower + }) + }); + follower.update(cx, |follower, _| { + follower.set_filter_mode(Some(MultiBufferFilterMode::KeepDeletions)); + }); + let secondary_editor = workspace.update(cx, |workspace, cx| { + cx.new(|cx| { + let mut editor = Editor::for_multibuffer(follower, Some(project), window, cx); + // TODO(split-diff) this should be at the multibuffer level + editor.set_use_base_text_line_numbers(true, cx); + editor.added_to_workspace(workspace, window, cx); + editor + }) + }); + let secondary_pane = cx.new(|cx| { + let mut pane = Pane::new( + workspace.downgrade(), + workspace.read(cx).project().clone(), + Default::default(), + None, + NoAction.boxed_clone(), + true, + window, + cx, + ); + pane.set_should_display_tab_bar(|_, _| false); + pane.add_item( + ItemHandle::boxed_clone(&secondary_editor), + false, + false, + None, + window, + cx, + ); + pane + }); + + let subscriptions = + vec![ + cx.subscribe(&secondary_editor, |this, _, event: &EditorEvent, cx| { + if let EditorEvent::SelectionsChanged { .. 
} = event + && let Some(secondary) = &mut this.secondary + { + secondary.has_latest_selection = true; + } + cx.emit(event.clone()) + }), + ]; + self.secondary = Some(SecondaryEditor { + editor: secondary_editor, + pane: secondary_pane.clone(), + has_latest_selection: false, + _subscriptions: subscriptions, + }); + let primary_pane = self.panes.first_pane(); + self.panes + .split(&primary_pane, &secondary_pane, SplitDirection::Left, cx) + .unwrap(); + cx.notify(); + } + + fn unsplit(&mut self, _: &UnsplitDiff, _: &mut Window, cx: &mut Context) { + let Some(secondary) = self.secondary.take() else { + return; + }; + self.panes.remove(&secondary.pane, cx).unwrap(); + self.primary_editor.update(cx, |primary, cx| { + primary.buffer().update(cx, |buffer, _| { + buffer.set_filter_mode(None); + }); + }); + cx.notify(); + } + + pub fn added_to_workspace( + &mut self, + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + self.workspace = workspace.weak_handle(); + self.primary_editor.update(cx, |primary_editor, cx| { + primary_editor.added_to_workspace(workspace, window, cx); + }); + if let Some(secondary) = &self.secondary { + secondary.editor.update(cx, |secondary_editor, cx| { + secondary_editor.added_to_workspace(workspace, window, cx); + }); + } + } +} + +impl EventEmitter for SplittableEditor {} +impl Focusable for SplittableEditor { + fn focus_handle(&self, cx: &App) -> gpui::FocusHandle { + self.primary_editor.read(cx).focus_handle(cx) + } +} + +impl Render for SplittableEditor { + fn render( + &mut self, + window: &mut ui::Window, + cx: &mut ui::Context, + ) -> impl ui::IntoElement { + let inner = if self.secondary.is_none() { + self.primary_editor.clone().into_any_element() + } else if let Some(active) = self.panes.panes().into_iter().next() { + self.panes + .render( + None, + &ActivePaneDecorator::new(active, &self.workspace), + window, + cx, + ) + .into_any_element() + } else { + div().into_any_element() + }; + div() + .id("splittable-editor") + .on_action(cx.listener(Self::split)) + .on_action(cx.listener(Self::unsplit)) + .size_full() + .child(inner) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 9d1003e8c08b3d725ffa13b90eb0ee405520d8cd..1cc619385446502db6a3a0dceb6e70fa4b4e8416 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -16,7 +16,7 @@ use gpui::{ AppContext as _, Context, Entity, EntityId, Font, FontFeatures, FontStyle, FontWeight, Pixels, VisualTestContext, Window, font, size, }; -use multi_buffer::ToPoint; +use multi_buffer::{MultiBufferOffset, ToPoint}; use pretty_assertions::assert_eq; use project::{Project, project_settings::DiagnosticSeverity}; use ui::{App, BorrowAppContext, px}; @@ -78,7 +78,7 @@ pub fn marked_display_snapshot( let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); let markers = markers .into_iter() - .map(|offset| offset.to_display_point(&snapshot)) + .map(|offset| MultiBufferOffset(offset).to_display_point(&snapshot)) .collect(); (snapshot, markers) @@ -94,7 +94,11 @@ pub fn select_ranges( let (unmarked_text, text_ranges) = marked_text_ranges(marked_text, true); assert_eq!(editor.text(cx), unmarked_text); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(text_ranges) + s.select_ranges( + text_ranges + .into_iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }); } @@ -108,7 +112,12 @@ pub fn assert_text_with_selections( assert_eq!(editor.text(cx), unmarked_text, "text doesn't 
match"); let actual = generate_marked_text( &editor.text(cx), - &editor.selections.ranges(&editor.display_snapshot(cx)), + &editor + .selections + .ranges::(&editor.display_snapshot(cx)) + .into_iter() + .map(|range| range.start.0..range.end.0) + .collect::>(), marked_text.contains("«"), ); assert_eq!(actual, marked_text, "Selections don't match"); @@ -167,11 +176,9 @@ pub fn block_content_for_tests( } pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestContext) -> String { - cx.draw( - gpui::Point::default(), - size(px(3000.0), px(3000.0)), - |_, _| editor.clone(), - ); + let draw_size = size(px(3000.0), px(3000.0)); + cx.simulate_resize(draw_size); + cx.draw(gpui::Point::default(), draw_size, |_, _| editor.clone()); let (snapshot, mut lines, blocks) = editor.update_in(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); let text = editor.display_text(cx); diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 427f0bd0de4d56bd01f6a1525ec8aaaf83fe3870..7c4c0e48d36dbb9f74a1c835c63fa2b91c5681d9 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -6,7 +6,8 @@ use std::{ }; use anyhow::Result; -use language::rust_lang; +use language::{markdown_lang, rust_lang}; +use multi_buffer::MultiBufferOffset; use serde_json::json; use crate::{Editor, ToPoint}; @@ -125,7 +126,7 @@ impl EditorLspTestContext { .read(cx) .nav_history_for_item(&cx.entity()); editor.set_nav_history(Some(nav_history)); - window.focus(&editor.focus_handle(cx)) + window.focus(&editor.focus_handle(cx), cx) }); let lsp = fake_servers.next().await.unwrap(); @@ -313,54 +314,58 @@ impl EditorLspTestContext { Self::new(language, Default::default(), cx).await } + pub async fn new_markdown_with_rust(cx: &mut gpui::TestAppContext) -> Self { + let context = Self::new( + Arc::into_inner(markdown_lang()).unwrap(), + Default::default(), + cx, + ) + .await; + + let language_registry = context.workspace.read_with(cx, |workspace, cx| { + workspace.project().read(cx).languages().clone() + }); + language_registry.add(rust_lang()); + + context + } + /// Constructs lsp range using a marked string with '[', ']' range delimiters #[track_caller] pub fn lsp_range(&mut self, marked_text: &str) -> lsp::Range { let ranges = self.ranges(marked_text); - self.to_lsp_range(ranges[0].clone()) + self.to_lsp_range(MultiBufferOffset(ranges[0].start)..MultiBufferOffset(ranges[0].end)) } #[expect(clippy::wrong_self_convention, reason = "This is test code")] - pub fn to_lsp_range(&mut self, range: Range) -> lsp::Range { + pub fn to_lsp_range(&mut self, range: Range) -> lsp::Range { + use language::ToPointUtf16; let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx)); let start_point = range.start.to_point(&snapshot.buffer_snapshot()); let end_point = range.end.to_point(&snapshot.buffer_snapshot()); self.editor(|editor, _, cx| { let buffer = editor.buffer().read(cx); - let start = point_to_lsp( - buffer - .point_to_buffer_offset(start_point, cx) - .unwrap() - .1 - .to_point_utf16(&buffer.read(cx)), - ); - let end = point_to_lsp( - buffer - .point_to_buffer_offset(end_point, cx) - .unwrap() - .1 - .to_point_utf16(&buffer.read(cx)), - ); - + let (start_buffer, start_offset) = + buffer.point_to_buffer_offset(start_point, cx).unwrap(); + let start = point_to_lsp(start_offset.to_point_utf16(&start_buffer.read(cx))); + let (end_buffer, end_offset) = 
buffer.point_to_buffer_offset(end_point, cx).unwrap(); + let end = point_to_lsp(end_offset.to_point_utf16(&end_buffer.read(cx))); lsp::Range { start, end } }) } #[expect(clippy::wrong_self_convention, reason = "This is test code")] - pub fn to_lsp(&mut self, offset: usize) -> lsp::Position { + pub fn to_lsp(&mut self, offset: MultiBufferOffset) -> lsp::Position { + use language::ToPointUtf16; + let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx)); let point = offset.to_point(&snapshot.buffer_snapshot()); self.editor(|editor, _, cx| { let buffer = editor.buffer().read(cx); - point_to_lsp( - buffer - .point_to_buffer_offset(point, cx) - .unwrap() - .1 - .to_point_utf16(&buffer.read(cx)), - ) + let (buffer, offset) = buffer.point_to_buffer_offset(point, cx).unwrap(); + point_to_lsp(offset.to_point_utf16(&buffer.read(cx))) }) } diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 7f5bb227fb98d1ebe5df51d59bdae22825bc4fef..267058691d0070678830ba9d7c40f54a9363737b 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -13,7 +13,7 @@ use gpui::{ }; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::{Anchor, ExcerptRange, MultiBufferRow}; +use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow}; use parking_lot::RwLock; use project::{FakeFs, Project}; use std::{ @@ -59,6 +59,17 @@ impl EditorTestContext { }) .await .unwrap(); + + let language = project + .read_with(cx, |project, _cx| { + project.languages().language_for_name("Plain Text") + }) + .await + .unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(language), cx); + }); + let editor = cx.add_window(|window, cx| { let editor = build_editor_with_project( project, @@ -67,7 +78,7 @@ impl EditorTestContext { cx, ); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor }); let editor_view = editor.root(cx).unwrap(); @@ -128,7 +139,7 @@ impl EditorTestContext { let editor = cx.add_window(|window, cx| { let editor = build_editor(buffer, window, cx); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor }); @@ -256,7 +267,7 @@ impl EditorTestContext { let snapshot = self.editor.update_in(&mut self.cx, |editor, window, cx| { editor.snapshot(window, cx) }); - ranges[0].start.to_display_point(&snapshot) + MultiBufferOffset(ranges[0].start).to_display_point(&snapshot) } pub fn pixel_position(&mut self, marked_text: &str) -> Point { @@ -272,8 +283,7 @@ impl EditorTestContext { .head(); let pixel_position = editor.pixel_position_of_newest_cursor.unwrap(); let line_height = editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()); let snapshot = editor.snapshot(window, cx); @@ -295,6 +305,12 @@ impl EditorTestContext { snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end) } + pub async fn wait_for_autoindent_applied(&mut self) { + if let Some(fut) = self.update_buffer(|buffer, _| buffer.wait_for_autoindent_applied()) { + fut.await.ok(); + } + } + pub fn set_head_text(&mut self, diff_base: &str) { self.cx.run_until_parked(); let fs = @@ -362,7 +378,11 @@ impl EditorTestContext { self.editor.update_in(&mut self.cx, |editor, window, cx| { editor.set_text(unmarked_text, window, cx); editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(selection_ranges) + 
s.select_ranges( + selection_ranges + .into_iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }) }); state_context @@ -379,7 +399,11 @@ impl EditorTestContext { self.editor.update_in(&mut self.cx, |editor, window, cx| { assert_eq!(editor.text(cx), unmarked_text); editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(selection_ranges) + s.select_ranges( + selection_ranges + .into_iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }) }); state_context @@ -471,11 +495,7 @@ impl EditorTestContext { ); assert_eq!( multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer( - excerpt_id, - snapshot.remote_id(), - range.context.clone() - )) + .text_for_range(Anchor::range_in_buffer(excerpt_id, range.context.clone())) .collect::(), expected_text, "{}", @@ -565,6 +585,7 @@ impl EditorTestContext { .unwrap_or_default() .iter() .map(|range| range.to_offset(&snapshot.buffer_snapshot())) + .map(|range| range.start.0..range.end.0) .collect() }); assert_set_eq!(actual_ranges, expected_ranges); @@ -580,6 +601,7 @@ impl EditorTestContext { .unwrap_or_default() .into_iter() .map(|range| range.to_offset(&snapshot.buffer_snapshot())) + .map(|range| range.start.0..range.end.0) .collect(); assert_set_eq!(actual_ranges, expected_ranges); } @@ -597,14 +619,16 @@ impl EditorTestContext { fn editor_selections(&mut self) -> Vec> { self.editor .update(&mut self.cx, |editor, cx| { - editor.selections.all::(&editor.display_snapshot(cx)) + editor + .selections + .all::(&editor.display_snapshot(cx)) }) .into_iter() .map(|s| { if s.reversed { - s.end..s.start + s.end.0..s.start.0 } else { - s.start..s.end + s.start.0..s.end.0 } }) .collect::>() @@ -652,11 +676,7 @@ impl std::fmt::Display for FormatMultiBufferAsMarkedText { } let mut text = multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer( - *excerpt_id, - snapshot.remote_id(), - range.context.clone(), - )) + .text_for_range(Anchor::range_in_buffer(*excerpt_id, range.context.clone())) .collect::(); let selections = selections @@ -700,7 +720,10 @@ pub fn assert_state_with_diff( snapshot.buffer_snapshot().clone(), editor .selections - .ranges::(&snapshot.display_snapshot), + .ranges::(&snapshot.display_snapshot) + .into_iter() + .map(|range| range.start.0..range.end.0) + .collect::>(), ) }); diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index d04dad8e99961480ce5f08328fa97aeabf5eda10..80633696b7d5e655bb7db3627568b881642cf62c 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -25,7 +25,7 @@ use language_model::{ConfiguredModel, LanguageModel, LanguageModelRegistry, Sele use node_runtime::{NodeBinaryOptions, NodeRuntime}; use project::project_settings::ProjectSettings; use prompt_store::PromptBuilder; -use release_channel::AppVersion; +use release_channel::{AppCommitSha, AppVersion}; use reqwest_client::ReqwestClient; use settings::{Settings, SettingsStore}; use std::cell::RefCell; @@ -347,8 +347,15 @@ pub struct AgentAppState { } pub fn init(cx: &mut App) -> Arc { - let app_version = AppVersion::load(env!("ZED_PKG_VERSION")); - release_channel::init(app_version, cx); + let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned())); + + let app_version = AppVersion::load( + env!("ZED_PKG_VERSION"), + option_env!("ZED_BUILD_ID"), + app_commit_sha, + ); + + release_channel::init(app_version.clone(), cx); gpui_tokio::init(cx); let settings_store = SettingsStore::new(cx, 
&settings::default_settings()); @@ -463,8 +470,8 @@ pub fn find_model( .ok_or_else(|| { anyhow::anyhow!( "No language model with ID {}/{} was available. Available models: {}", - selected.model.0, selected.provider.0, + selected.model.0, model_registry .available_models(cx) .map(|model| format!("{}/{}", model.provider_id().0, model.id().0)) diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index 84c47766e96948bccfc01f3b4472b5100c4b7b64..c4d076037f637ffdf2b8d4c8bbed05349d9ea38e 100644 --- a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -261,7 +261,7 @@ impl ExampleContext { .expect("Unknown tool_name content in meta"); tool_uses_by_id.insert( - tool_call.id, + tool_call.tool_call_id, ToolUse { name: tool_name.to_string(), value: tool_call.raw_input.unwrap_or_default(), @@ -277,7 +277,9 @@ impl ExampleContext { ThreadEvent::ToolCallUpdate(tool_call_update) => { if let acp_thread::ToolCallUpdate::UpdateFields(update) = tool_call_update { if let Some(raw_input) = update.fields.raw_input { - if let Some(tool_use) = tool_uses_by_id.get_mut(&update.id) { + if let Some(tool_use) = + tool_uses_by_id.get_mut(&update.tool_call_id) + { tool_use.value = raw_input; } } @@ -290,7 +292,7 @@ impl ExampleContext { update.fields.status == Some(acp::ToolCallStatus::Completed); let tool_use = tool_uses_by_id - .remove(&update.id) + .remove(&update.tool_call_id) .expect("Unrecognized tool call completed"); let log_message = if succeeded { @@ -337,10 +339,7 @@ impl ExampleContext { acp::StopReason::MaxTurnRequests => { return Err(anyhow!("Exceeded maximum turn requests")); } - acp::StopReason::Refusal => { - return Err(anyhow!("Refusal")); - } - acp::StopReason::Cancelled => return Err(anyhow!("Cancelled")), + stop_reason => return Err(anyhow!("{stop_reason:?}")), }, } } diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 035f1ec0ac8d0c6490dc39637e03e377ee3d194b..8c9da3eefab61e4fa5897f9d76123c3fe1d5fa8b 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -202,6 +202,7 @@ impl ExampleInstance { app_state.languages.clone(), app_state.fs.clone(), None, + false, cx, ); @@ -303,13 +304,12 @@ impl ExampleInstance { let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let thread = if let Some(json) = &meta.existing_thread_json { - let session_id = acp::SessionId( + let session_id = acp::SessionId::new( rand::rng() .sample_iter(&distr::Alphanumeric) .take(7) .map(char::from) - .collect::() - .into(), + .collect::(), ); let db_thread = agent::DbThread::from_json(json.as_bytes()).expect("Can't read serialized thread"); @@ -553,6 +553,7 @@ impl ExampleInstance { role: Role::User, content: vec![MessageContent::Text(to_prompt(assertion.description))], cache: false, + reasoning_details: None, }], temperature: None, tools: Vec::new(), @@ -625,6 +626,15 @@ impl agent::TerminalHandle for EvalTerminalHandle { self.terminal .read_with(cx, |term, cx| term.current_output(cx)) } + + fn kill(&self, cx: &AsyncApp) -> Result<()> { + cx.update(|cx| { + self.terminal.update(cx, |terminal, cx| { + terminal.kill(cx); + }); + })?; + Ok(()) + } } impl agent::ThreadEnvironment for EvalThreadEnvironment { @@ -639,7 +649,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment { cx.spawn(async move |cx| { let language_registry = project.read_with(cx, |project, _cx| project.languages().clone())?; - let id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into()); + let id = 
acp::TerminalId::new(uuid::Uuid::new_v4().to_string()); let terminal = acp_thread::create_terminal_entity(command, &[], vec![], cwd.clone(), &project, cx) .await?; @@ -892,7 +902,7 @@ pub fn wait_for_lang_server( .update(cx, |buffer, cx| { lsp_store.update(cx, |lsp_store, cx| { lsp_store - .language_servers_for_local_buffer(buffer, cx) + .running_language_servers_for_local_buffer(buffer, cx) .next() .is_some() }) @@ -1251,8 +1261,12 @@ pub fn response_events_to_markdown( } Ok( LanguageModelCompletionEvent::UsageUpdate(_) + | LanguageModelCompletionEvent::ToolUseLimitReached | LanguageModelCompletionEvent::StartMessage { .. } - | LanguageModelCompletionEvent::StatusUpdate { .. }, + | LanguageModelCompletionEvent::UsageUpdated { .. } + | LanguageModelCompletionEvent::Queued { .. } + | LanguageModelCompletionEvent::Started + | LanguageModelCompletionEvent::ReasoningDetails(_), ) => {} Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { json_parse_error, .. @@ -1337,9 +1351,13 @@ impl ThreadDialog { // Skip these Ok(LanguageModelCompletionEvent::UsageUpdate(_)) | Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) - | Ok(LanguageModelCompletionEvent::StatusUpdate { .. }) | Ok(LanguageModelCompletionEvent::StartMessage { .. }) - | Ok(LanguageModelCompletionEvent::Stop(_)) => {} + | Ok(LanguageModelCompletionEvent::ReasoningDetails(_)) + | Ok(LanguageModelCompletionEvent::Stop(_)) + | Ok(LanguageModelCompletionEvent::Queued { .. }) + | Ok(LanguageModelCompletionEvent::Started) + | Ok(LanguageModelCompletionEvent::UsageUpdated { .. }) + | Ok(LanguageModelCompletionEvent::ToolUseLimitReached) => {} Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { json_parse_error, @@ -1366,6 +1384,7 @@ impl ThreadDialog { role: Role::Assistant, content, cache: false, + reasoning_details: None, }) } else { None diff --git a/crates/eval_utils/Cargo.toml b/crates/eval_utils/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..a512035f5d1754f0f6f942faa27d063e169a22ef --- /dev/null +++ b/crates/eval_utils/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "eval_utils" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/eval_utils.rs" +doctest = false + +[dependencies] +gpui.workspace = true +serde.workspace = true +smol.workspace = true diff --git a/crates/eval_utils/LICENSE-GPL b/crates/eval_utils/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..e0f9dbd5d63fef1630c297edc4ceba4790be6f02 --- /dev/null +++ b/crates/eval_utils/LICENSE-GPL @@ -0,0 +1 @@ +LICENSE-GPL \ No newline at end of file diff --git a/crates/eval_utils/README.md b/crates/eval_utils/README.md new file mode 100644 index 0000000000000000000000000000000000000000..617077a81524ff918e8b9b93aa970d636504479c --- /dev/null +++ b/crates/eval_utils/README.md @@ -0,0 +1,3 @@ +# eval_utils + +Utilities for evals of agents. diff --git a/crates/eval_utils/src/eval_utils.rs b/crates/eval_utils/src/eval_utils.rs new file mode 100644 index 0000000000000000000000000000000000000000..be3294ed1490d6a602c3a5282d25dbba7d065443 --- /dev/null +++ b/crates/eval_utils/src/eval_utils.rs @@ -0,0 +1,146 @@ +//! Utilities for evaluation and benchmarking. 
+ +use std::{ + collections::HashMap, + sync::{Arc, mpsc}, +}; + +fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usize) { + let passed_count = evaluated_count - failed_count; + let passed_ratio = if evaluated_count == 0 { + 0.0 + } else { + passed_count as f64 / evaluated_count as f64 + }; + println!( + "\r\x1b[KEvaluated {}/{} ({:.2}% passed)", + evaluated_count, + iterations, + passed_ratio * 100.0 + ) +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum OutcomeKind { + Passed, + Failed, + Error, +} + +pub trait EvalOutputProcessor { + type Metadata: 'static + Send; + fn process(&mut self, output: &EvalOutput); + fn assert(&mut self); +} + +#[derive(Clone, Debug)] +pub struct EvalOutput { + pub outcome: OutcomeKind, + pub data: String, + pub metadata: M, +} + +impl EvalOutput { + pub fn passed(message: impl Into) -> Self { + EvalOutput { + outcome: OutcomeKind::Passed, + data: message.into(), + metadata: M::default(), + } + } + + pub fn failed(message: impl Into) -> Self { + EvalOutput { + outcome: OutcomeKind::Failed, + data: message.into(), + metadata: M::default(), + } + } +} + +pub struct NoProcessor; +impl EvalOutputProcessor for NoProcessor { + type Metadata = (); + + fn process(&mut self, _output: &EvalOutput) {} + + fn assert(&mut self) {} +} + +pub fn eval, _keymap: &Keymap) { - self.inner.state.borrow_mut().menus = menus.into_iter().map(|menu| menu.owned()).collect(); + *self.inner.state.menus.borrow_mut() = menus.into_iter().map(|menu| menu.owned()).collect(); } fn get_menus(&self) -> Option> { - Some(self.inner.state.borrow().menus.clone()) + Some(self.inner.state.menus.borrow().clone()) } fn set_dock_menu(&self, menus: Vec, _keymap: &Keymap) { @@ -552,19 +560,27 @@ impl Platform for WindowsPlatform { } fn on_app_menu_action(&self, callback: Box) { - self.inner.state.borrow_mut().callbacks.app_menu_action = Some(callback); + self.inner + .state + .callbacks + .app_menu_action + .set(Some(callback)); } fn on_will_open_app_menu(&self, callback: Box) { - self.inner.state.borrow_mut().callbacks.will_open_app_menu = Some(callback); + self.inner + .state + .callbacks + .will_open_app_menu + .set(Some(callback)); } fn on_validate_app_menu_command(&self, callback: Box bool>) { self.inner .state - .borrow_mut() .callbacks - .validate_app_menu_command = Some(callback); + .validate_app_menu_command + .set(Some(callback)); } fn app_path(&self) -> Result { @@ -578,14 +594,13 @@ impl Platform for WindowsPlatform { fn set_cursor_style(&self, style: CursorStyle) { let hcursor = load_cursor(style); - let mut lock = self.inner.state.borrow_mut(); - if lock.current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) { + if self.inner.state.current_cursor.get().map(|c| c.0) != hcursor.map(|c| c.0) { self.post_message( WM_GPUI_CURSOR_STYLE_CHANGED, WPARAM(0), LPARAM(hcursor.map_or(0, |c| c.0 as isize)), ); - lock.current_cursor = hcursor; + self.inner.state.current_cursor.set(hcursor); } } @@ -632,15 +647,24 @@ impl Platform for WindowsPlatform { .collect_vec(); self.foreground_executor().spawn(async move { let mut credentials: *mut CREDENTIALW = std::ptr::null_mut(); - unsafe { + let result = unsafe { CredReadW( PCWSTR::from_raw(target_name.as_ptr()), CRED_TYPE_GENERIC, None, &mut credentials, - )? + ) }; + if let Err(err) = result { + // ERROR_NOT_FOUND means the credential doesn't exist. + // Return Ok(None) to match macOS and Linux behavior. 
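// CredReadW reports a missing credential by failing with the Win32 error
// ERROR_NOT_FOUND (1168); only that case is mapped to `Ok(None)` here, while
// every other failure is still returned to the caller.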
+ if err.code().0 == ERROR_NOT_FOUND.0 as i32 { + return Ok(None); + } + return Err(err.into()); + } + if credentials.is_null() { Ok(None) } else { @@ -702,12 +726,12 @@ impl Platform for WindowsPlatform { impl WindowsPlatformInner { fn new(context: &mut PlatformWindowCreateContext) -> Result> { - let state = RefCell::new(WindowsPlatformState::new( + let state = WindowsPlatformState::new( context .directx_devices .take() .context("missing directx devices")?, - )); + ); Ok(Rc::new(Self { state, raw_window_handles: context.raw_window_handles.clone(), @@ -724,6 +748,19 @@ impl WindowsPlatformInner { })) } + /// Calls `project` to project to the corresponding callback field, removes it from callbacks, calls `f` with the callback and then puts the callback back. + fn with_callback( + &self, + project: impl Fn(&PlatformCallbacks) -> &Cell>, + f: impl FnOnce(&mut T), + ) { + let callback = project(&self.state.callbacks).take(); + if let Some(mut callback) = callback { + f(&mut callback); + project(&self.state.callbacks).set(Some(callback)); + } + } + fn handle_msg( self: &Rc, handle: HWND, @@ -781,24 +818,60 @@ impl WindowsPlatformInner { #[inline] fn run_foreground_task(&self) -> Option { - loop { - for runnable in self.main_receiver.drain() { - runnable.run(); + const MAIN_TASK_TIMEOUT: u128 = 10; + + let start = std::time::Instant::now(); + 'tasks: loop { + 'timeout_loop: loop { + if start.elapsed().as_millis() >= MAIN_TASK_TIMEOUT { + log::debug!("foreground task timeout reached"); + // we spent our budget on gpui tasks, we likely have a lot of work queued so drain system events first to stay responsive + // then quit out of foreground work to allow us to process other gpui events first before returning back to foreground task work + // if we don't we might not for example process window quit events + let mut msg = MSG::default(); + let process_message = |msg: &_| { + if translate_accelerator(msg).is_none() { + _ = unsafe { TranslateMessage(msg) }; + unsafe { DispatchMessageW(msg) }; + } + }; + let peek_msg = |msg: &mut _, msg_kind| unsafe { + PeekMessageW(msg, None, 0, 0, PM_REMOVE | msg_kind).as_bool() + }; + if peek_msg(&mut msg, PM_QS_PAINT) { + process_message(&msg); + } + while peek_msg(&mut msg, PM_QS_INPUT) { + process_message(&msg); + } + // Allow the main loop to process other gpui events before going back into `run_foreground_task` + unsafe { + if let Err(_) = PostMessageW( + Some(self.dispatcher.platform_window_handle.as_raw()), + WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, + WPARAM(self.validation_number), + LPARAM(0), + ) { + self.dispatcher.wake_posted.store(false, Ordering::Release); + }; + } + break 'tasks; + } + match self.main_receiver.try_recv() { + Err(_) => break 'timeout_loop, + Ok(runnable) => WindowsDispatcher::execute_runnable(runnable), + } } // Someone could enqueue a Runnable here. The flag is still true, so they will not PostMessage. // We need to check for those Runnables after we clear the flag. 
- let dispatcher = self.dispatcher.clone(); - - dispatcher.wake_posted.store(false, Ordering::Release); + self.dispatcher.wake_posted.store(false, Ordering::Release); match self.main_receiver.try_recv() { + Err(_) => break 'tasks, Ok(runnable) => { - let _ = dispatcher.wake_posted.swap(true, Ordering::AcqRel); - runnable.run(); - continue; - } - _ => { - break; + self.dispatcher.wake_posted.store(true, Ordering::Release); + + WindowsDispatcher::execute_runnable(runnable); } } } @@ -807,42 +880,37 @@ impl WindowsPlatformInner { } fn handle_dock_action_event(&self, action_idx: usize) -> Option { - let mut lock = self.state.borrow_mut(); - let mut callback = lock.callbacks.app_menu_action.take()?; - let Some(action) = lock + let Some(action) = self + .state .jump_list + .borrow() .dock_menus .get(action_idx) .map(|dock_menu| dock_menu.action.boxed_clone()) else { - lock.callbacks.app_menu_action = Some(callback); log::error!("Dock menu for index {action_idx} not found"); return Some(1); }; - drop(lock); - callback(&*action); - self.state.borrow_mut().callbacks.app_menu_action = Some(callback); + self.with_callback( + |callbacks| &callbacks.app_menu_action, + |callback| callback(&*action), + ); Some(0) } fn handle_keyboard_layout_change(&self) -> Option { - let mut callback = self - .state - .borrow_mut() - .callbacks - .keyboard_layout_change - .take()?; - callback(); - self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback); + self.with_callback( + |callbacks| &callbacks.keyboard_layout_change, + |callback| callback(), + ); Some(0) } fn handle_device_lost(&self, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); let directx_devices = lparam.0 as *const DirectXDevices; let directx_devices = unsafe { &*directx_devices }; - lock.directx_devices.take(); - lock.directx_devices = Some(directx_devices.clone()); + self.state.directx_devices.borrow_mut().take(); + *self.state.directx_devices.borrow_mut() = Some(directx_devices.clone()); Some(0) } @@ -866,18 +934,21 @@ pub(crate) struct WindowCreationInfo { pub(crate) windows_version: WindowsVersion, pub(crate) drop_target_helper: IDropTargetHelper, pub(crate) validation_number: usize, - pub(crate) main_receiver: flume::Receiver, + pub(crate) main_receiver: flume::Receiver, pub(crate) platform_window_handle: HWND, pub(crate) disable_direct_composition: bool, pub(crate) directx_devices: DirectXDevices, + /// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices + /// as resizing them has failed, causing us to have lost at least the render target. 
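// Elsewhere in this patch window.rs adds an `atomic::AtomicBool` import, so this
// field is most likely an `Arc<AtomicBool>` shared between the window and the
// vsync thread: the window sets it when resizing the swap chain fails, and the
// vsync side presumably checks and clears it so the DirectX devices get
// recreated off the render path.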
+ pub(crate) invalidate_devices: Arc, } struct PlatformWindowCreateContext { inner: Option>>, raw_window_handles: std::sync::Weak>>, validation_number: usize, - main_sender: Option>, - main_receiver: Option>, + main_sender: Option>, + main_receiver: Option>, directx_devices: Option, dispatcher: Option>, } diff --git a/crates/gpui/src/platform/windows/system_settings.rs b/crates/gpui/src/platform/windows/system_settings.rs index b2bd289cd00979541f0176a4ccea6a52143b9ddd..f5ef5ce31ec23b69d1f009792c693e248d404b8e 100644 --- a/crates/gpui/src/platform/windows/system_settings.rs +++ b/crates/gpui/src/platform/windows/system_settings.rs @@ -1,4 +1,7 @@ -use std::ffi::{c_uint, c_void}; +use std::{ + cell::Cell, + ffi::{c_uint, c_void}, +}; use ::util::ResultExt; use windows::Win32::UI::{ @@ -15,18 +18,18 @@ use super::WindowsDisplay; /// Windows settings pulled from SystemParametersInfo /// https://learn.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-systemparametersinfow -#[derive(Default, Debug, Clone, Copy)] +#[derive(Default, Debug, Clone)] pub(crate) struct WindowsSystemSettings { pub(crate) mouse_wheel_settings: MouseWheelSettings, - pub(crate) auto_hide_taskbar_position: Option, + pub(crate) auto_hide_taskbar_position: Cell>, } -#[derive(Default, Debug, Clone, Copy)] +#[derive(Default, Debug, Clone)] pub(crate) struct MouseWheelSettings { /// SEE: SPI_GETWHEELSCROLLCHARS - pub(crate) wheel_scroll_chars: u32, + pub(crate) wheel_scroll_chars: Cell, /// SEE: SPI_GETWHEELSCROLLLINES - pub(crate) wheel_scroll_lines: u32, + pub(crate) wheel_scroll_lines: Cell, } impl WindowsSystemSettings { @@ -36,12 +39,13 @@ impl WindowsSystemSettings { settings } - fn init(&mut self, display: WindowsDisplay) { + fn init(&self, display: WindowsDisplay) { self.mouse_wheel_settings.update(); - self.auto_hide_taskbar_position = AutoHideTaskbarPosition::new(display).log_err().flatten(); + self.auto_hide_taskbar_position + .set(AutoHideTaskbarPosition::new(display).log_err().flatten()); } - pub(crate) fn update(&mut self, display: WindowsDisplay, wparam: usize) { + pub(crate) fn update(&self, display: WindowsDisplay, wparam: usize) { match wparam { // SPI_SETWORKAREA 47 => self.update_taskbar_position(display), @@ -51,22 +55,23 @@ impl WindowsSystemSettings { } } - fn update_mouse_wheel_settings(&mut self) { + fn update_mouse_wheel_settings(&self) { self.mouse_wheel_settings.update(); } - fn update_taskbar_position(&mut self, display: WindowsDisplay) { - self.auto_hide_taskbar_position = AutoHideTaskbarPosition::new(display).log_err().flatten(); + fn update_taskbar_position(&self, display: WindowsDisplay) { + self.auto_hide_taskbar_position + .set(AutoHideTaskbarPosition::new(display).log_err().flatten()); } } impl MouseWheelSettings { - fn update(&mut self) { + fn update(&self) { self.update_wheel_scroll_chars(); self.update_wheel_scroll_lines(); } - fn update_wheel_scroll_chars(&mut self) { + fn update_wheel_scroll_chars(&self) { let mut value = c_uint::default(); let result = unsafe { SystemParametersInfoW( @@ -77,12 +82,12 @@ impl MouseWheelSettings { ) }; - if result.log_err() != None && self.wheel_scroll_chars != value { - self.wheel_scroll_chars = value; + if result.log_err() != None && self.wheel_scroll_chars.get() != value { + self.wheel_scroll_chars.set(value); } } - fn update_wheel_scroll_lines(&mut self) { + fn update_wheel_scroll_lines(&self) { let mut value = c_uint::default(); let result = unsafe { SystemParametersInfoW( @@ -93,8 +98,8 @@ impl MouseWheelSettings { ) }; - if result.log_err() 
!= None && self.wheel_scroll_lines != value { - self.wheel_scroll_lines = value; + if result.log_err() != None && self.wheel_scroll_lines.get() != value { + self.wheel_scroll_lines.set(value); } } } diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 0050fa4bc0e96b8702314f33637db67998b5941d..7ef92b4150e69424b68e9417dda377aa7f2e9cc0 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -1,24 +1,24 @@ #![deny(unsafe_op_in_unsafe_fn)] use std::{ - cell::RefCell, + cell::{Cell, RefCell}, num::NonZeroIsize, path::PathBuf, rc::{Rc, Weak}, str::FromStr, - sync::{Arc, Once}, + sync::{Arc, Once, atomic::AtomicBool}, time::{Duration, Instant}, }; use ::util::ResultExt; use anyhow::{Context as _, Result}; -use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use raw_window_handle as rwh; use smallvec::SmallVec; use windows::{ Win32::{ Foundation::*, + Graphics::Dwm::*, Graphics::Gdi::*, System::{Com::*, LibraryLoader::*, Ole::*, SystemServices::*}, UI::{Controls::*, HiDpi::*, Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*}, @@ -30,47 +30,58 @@ use crate::*; pub(crate) struct WindowsWindow(pub Rc); +impl std::ops::Deref for WindowsWindow { + type Target = WindowsWindowInner; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + pub struct WindowsWindowState { - pub origin: Point, - pub logical_size: Size, + pub origin: Cell>, + pub logical_size: Cell>, pub min_size: Option>, - pub fullscreen_restore_bounds: Bounds, + pub fullscreen_restore_bounds: Cell>, pub border_offset: WindowBorderOffset, - pub appearance: WindowAppearance, - pub scale_factor: f32, - pub restore_from_minimized: Option>, + pub appearance: Cell, + pub scale_factor: Cell, + pub restore_from_minimized: Cell>>, pub callbacks: Callbacks, - pub input_handler: Option, - pub pending_surrogate: Option, - pub last_reported_modifiers: Option, - pub last_reported_capslock: Option, - pub hovered: bool, + pub input_handler: Cell>, + pub pending_surrogate: Cell>, + pub last_reported_modifiers: Cell>, + pub last_reported_capslock: Cell>, + pub hovered: Cell, - pub renderer: DirectXRenderer, + pub renderer: RefCell, pub click_state: ClickState, - pub current_cursor: Option, - pub nc_button_pressed: Option, - - pub display: WindowsDisplay, - fullscreen: Option, - initial_placement: Option, + pub current_cursor: Cell>, + pub nc_button_pressed: Cell>, + + pub display: Cell, + /// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices + /// as resizing them has failed, causing us to have lost at least the render target. 
+ pub invalidate_devices: Arc, + fullscreen: Cell>, + initial_placement: Cell>, hwnd: HWND, } pub(crate) struct WindowsWindowInner { hwnd: HWND, drop_target_helper: IDropTargetHelper, - pub(crate) state: RefCell, - pub(crate) system_settings: RefCell, + pub(crate) state: WindowsWindowState, + system_settings: WindowsSystemSettings, pub(crate) handle: AnyWindowHandle, pub(crate) hide_title_bar: bool, pub(crate) is_movable: bool, pub(crate) executor: ForegroundExecutor, pub(crate) windows_version: WindowsVersion, pub(crate) validation_number: usize, - pub(crate) main_receiver: flume::Receiver, + pub(crate) main_receiver: flume::Receiver, pub(crate) platform_window_handle: HWND, } @@ -84,6 +95,7 @@ impl WindowsWindowState { min_size: Option>, appearance: WindowAppearance, disable_direct_composition: bool, + invalidate_devices: Arc, ) -> Result { let scale_factor = { let monitor_dpi = unsafe { GetDpiForWindow(hwnd) } as f32; @@ -117,34 +129,35 @@ impl WindowsWindowState { let initial_placement = None; Ok(Self { - origin, - logical_size, - fullscreen_restore_bounds, + origin: Cell::new(origin), + logical_size: Cell::new(logical_size), + fullscreen_restore_bounds: Cell::new(fullscreen_restore_bounds), border_offset, - appearance, - scale_factor, - restore_from_minimized, + appearance: Cell::new(appearance), + scale_factor: Cell::new(scale_factor), + restore_from_minimized: Cell::new(restore_from_minimized), min_size, callbacks, - input_handler, - pending_surrogate, - last_reported_modifiers, - last_reported_capslock, - hovered, - renderer, + input_handler: Cell::new(input_handler), + pending_surrogate: Cell::new(pending_surrogate), + last_reported_modifiers: Cell::new(last_reported_modifiers), + last_reported_capslock: Cell::new(last_reported_capslock), + hovered: Cell::new(hovered), + renderer: RefCell::new(renderer), click_state, - current_cursor, - nc_button_pressed, - display, - fullscreen, - initial_placement, + current_cursor: Cell::new(current_cursor), + nc_button_pressed: Cell::new(nc_button_pressed), + display: Cell::new(display), + fullscreen: Cell::new(fullscreen), + initial_placement: Cell::new(initial_placement), hwnd, + invalidate_devices, }) } #[inline] pub(crate) fn is_fullscreen(&self) -> bool { - self.fullscreen.is_some() + self.fullscreen.get().is_some() } pub(crate) fn is_maximized(&self) -> bool { @@ -153,8 +166,8 @@ impl WindowsWindowState { fn bounds(&self) -> Bounds { Bounds { - origin: self.origin, - size: self.logical_size, + origin: self.origin.get(), + size: self.logical_size.get(), } } @@ -173,8 +186,8 @@ impl WindowsWindowState { ( calculate_client_rect( placement.rcNormalPosition, - self.border_offset, - self.scale_factor, + &self.border_offset, + self.scale_factor.get(), ), placement.showCmd == SW_SHOWMAXIMIZED.0 as u32, ) @@ -184,7 +197,7 @@ impl WindowsWindowState { let (bounds, maximized) = self.calculate_window_bounds(); if self.is_fullscreen() { - WindowBounds::Fullscreen(self.fullscreen_restore_bounds) + WindowBounds::Fullscreen(self.fullscreen_restore_bounds.get()) } else if maximized { WindowBounds::Maximized(bounds) } else { @@ -197,13 +210,13 @@ impl WindowsWindowState { /// Currently, GPUI uses the logical size of the app to handle mouse interactions (such as /// whether the mouse collides with other elements of GPUI). 
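// "Logical size" here is the client area divided by the monitor scale factor:
// a 3840x2160 client rect at 150% scaling is reported as 2560x1440, and
// hit-testing and layout happen in that coordinate space before being scaled
// back up to device pixels for rendering.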
fn content_size(&self) -> Size { - self.logical_size + self.logical_size.get() } } impl WindowsWindowInner { fn new(context: &mut WindowCreateContext, hwnd: HWND, cs: &CREATESTRUCTW) -> Result> { - let state = RefCell::new(WindowsWindowState::new( + let state = WindowsWindowState::new( hwnd, &context.directx_devices, cs, @@ -212,7 +225,8 @@ impl WindowsWindowInner { context.min_size, context.appearance, context.disable_direct_composition, - )?); + context.invalidate_devices.clone(), + )?; Ok(Rc::new(Self { hwnd, @@ -226,7 +240,7 @@ impl WindowsWindowInner { validation_number: context.validation_number, main_receiver: context.main_receiver.clone(), platform_window_handle: context.platform_window_handle, - system_settings: RefCell::new(WindowsSystemSettings::new(context.display)), + system_settings: WindowsSystemSettings::new(context.display), })) } @@ -234,19 +248,17 @@ impl WindowsWindowInner { let this = self.clone(); self.executor .spawn(async move { - let mut lock = this.state.borrow_mut(); let StyleAndBounds { style, x, y, cx, cy, - } = match lock.fullscreen.take() { + } = match this.state.fullscreen.take() { Some(state) => state, None => { - let (window_bounds, _) = lock.calculate_window_bounds(); - lock.fullscreen_restore_bounds = window_bounds; - drop(lock); + let (window_bounds, _) = this.state.calculate_window_bounds(); + this.state.fullscreen_restore_bounds.set(window_bounds); let style = WINDOW_STYLE(unsafe { get_window_long(this.hwnd, GWL_STYLE) } as _); @@ -254,22 +266,20 @@ impl WindowsWindowInner { unsafe { GetWindowRect(this.hwnd, &mut rc) } .context("failed to get window rect") .log_err(); - - lock = this.state.borrow_mut(); - let _ = lock.fullscreen.insert(StyleAndBounds { + let _ = this.state.fullscreen.set(Some(StyleAndBounds { style, x: rc.left, y: rc.top, cx: rc.right - rc.left, cy: rc.bottom - rc.top, - }); + })); let style = style & !(WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX | WS_CAPTION); - let physical_bounds = lock.display.physical_bounds(); + let physical_bounds = this.state.display.get().physical_bounds(); StyleAndBounds { style, x: physical_bounds.left().0, @@ -279,7 +289,6 @@ impl WindowsWindowInner { } } }; - drop(lock); unsafe { set_window_long(this.hwnd, GWL_STYLE, style.0 as isize) }; unsafe { SetWindowPos( @@ -298,7 +307,7 @@ impl WindowsWindowInner { } fn set_window_placement(self: &Rc) -> Result<()> { - let Some(open_status) = self.state.borrow_mut().initial_placement.take() else { + let Some(open_status) = self.state.initial_placement.take() else { return Ok(()); }; match open_status.state { @@ -321,20 +330,24 @@ impl WindowsWindowInner { } Ok(()) } + + pub(crate) fn system_settings(&self) -> &WindowsSystemSettings { + &self.system_settings + } } #[derive(Default)] pub(crate) struct Callbacks { - pub(crate) request_frame: Option>, - pub(crate) input: Option DispatchEventResult>>, - pub(crate) active_status_change: Option>, - pub(crate) hovered_status_change: Option>, - pub(crate) resize: Option, f32)>>, - pub(crate) moved: Option>, - pub(crate) should_close: Option bool>>, - pub(crate) close: Option>, - pub(crate) hit_test_window_control: Option Option>>, - pub(crate) appearance_changed: Option>, + pub(crate) request_frame: Cell>>, + pub(crate) input: Cell DispatchEventResult>>>, + pub(crate) active_status_change: Cell>>, + pub(crate) hovered_status_change: Cell>>, + pub(crate) resize: Cell, f32)>>>, + pub(crate) moved: Cell>>, + pub(crate) should_close: Cell bool>>>, + pub(crate) close: Cell>>, + pub(crate) 
hit_test_window_control: Cell Option>>>, + pub(crate) appearance_changed: Cell>>, } struct WindowCreateContext { @@ -349,11 +362,12 @@ struct WindowCreateContext { windows_version: WindowsVersion, drop_target_helper: IDropTargetHelper, validation_number: usize, - main_receiver: flume::Receiver, + main_receiver: flume::Receiver, platform_window_handle: HWND, appearance: WindowAppearance, disable_direct_composition: bool, directx_devices: DirectXDevices, + invalidate_devices: Arc, } impl WindowsWindow { @@ -373,6 +387,7 @@ impl WindowsWindow { platform_window_handle, disable_direct_composition, directx_devices, + invalidate_devices, } = creation_info; register_window_class(icon); let hide_title_bar = params @@ -433,6 +448,7 @@ impl WindowsWindow { appearance, disable_direct_composition, directx_devices, + invalidate_devices, }; let creation_result = unsafe { CreateWindowExW( @@ -459,21 +475,21 @@ impl WindowsWindow { register_drag_drop(&this)?; configure_dwm_dark_mode(hwnd, appearance); - this.state.borrow_mut().border_offset.update(hwnd)?; + this.state.border_offset.update(hwnd)?; let placement = retrieve_window_placement( hwnd, display, params.bounds, - this.state.borrow().scale_factor, - this.state.borrow().border_offset, + this.state.scale_factor.get(), + &this.state.border_offset, )?; if params.show { unsafe { SetWindowPlacement(hwnd, &placement)? }; } else { - this.state.borrow_mut().initial_placement = Some(WindowOpenStatus { + this.state.initial_placement.set(Some(WindowOpenStatus { placement, state: WindowOpenState::Windowed, - }); + })); } Ok(Self(this)) @@ -516,15 +532,15 @@ impl Drop for WindowsWindow { impl PlatformWindow for WindowsWindow { fn bounds(&self) -> Bounds { - self.0.state.borrow().bounds() + self.state.bounds() } fn is_maximized(&self) -> bool { - self.0.state.borrow().is_maximized() + self.state.is_maximized() } fn window_bounds(&self) -> WindowBounds { - self.0.state.borrow().window_bounds() + self.state.window_bounds() } /// get the logical size of the app's drawable area. @@ -532,14 +548,14 @@ impl PlatformWindow for WindowsWindow { /// Currently, GPUI uses the logical size of the app to handle mouse interactions (such as /// whether the mouse collides with other elements of GPUI). 
fn content_size(&self) -> Size { - self.0.state.borrow().content_size() + self.state.content_size() } fn resize(&mut self, size: Size) { let hwnd = self.0.hwnd; let bounds = crate::bounds(self.bounds().origin, size).to_device_pixels(self.scale_factor()); - let rect = calculate_window_rect(bounds, self.0.state.borrow().border_offset); + let rect = calculate_window_rect(bounds, &self.state.border_offset); self.0 .executor @@ -562,15 +578,15 @@ impl PlatformWindow for WindowsWindow { } fn scale_factor(&self) -> f32 { - self.0.state.borrow().scale_factor + self.state.scale_factor.get() } fn appearance(&self) -> WindowAppearance { - self.0.state.borrow().appearance + self.state.appearance.get() } fn display(&self) -> Option> { - Some(Rc::new(self.0.state.borrow().display)) + Some(Rc::new(self.state.display.get())) } fn mouse_position(&self) -> Point { @@ -595,11 +611,11 @@ impl PlatformWindow for WindowsWindow { } fn set_input_handler(&mut self, input_handler: PlatformInputHandler) { - self.0.state.borrow_mut().input_handler = Some(input_handler); + self.state.input_handler.set(Some(input_handler)); } fn take_input_handler(&mut self) -> Option { - self.0.state.borrow_mut().input_handler.take() + self.state.input_handler.take() } fn prompt( @@ -745,7 +761,7 @@ impl PlatformWindow for WindowsWindow { } fn is_hovered(&self) -> bool { - self.0.state.borrow().hovered + self.state.hovered.get() } fn set_title(&mut self, title: &str) { @@ -757,20 +773,26 @@ impl PlatformWindow for WindowsWindow { fn set_background_appearance(&self, background_appearance: WindowBackgroundAppearance) { let hwnd = self.0.hwnd; + // using Dwm APIs for Mica and MicaAlt backdrops. + // others follow the set_window_composition_attribute approach match background_appearance { WindowBackgroundAppearance::Opaque => { - // ACCENT_DISABLED set_window_composition_attribute(hwnd, None, 0); } WindowBackgroundAppearance::Transparent => { - // Use ACCENT_ENABLE_TRANSPARENTGRADIENT for transparent background set_window_composition_attribute(hwnd, None, 2); } WindowBackgroundAppearance::Blurred => { - // Enable acrylic blur - // ACCENT_ENABLE_ACRYLICBLURBEHIND set_window_composition_attribute(hwnd, Some((0, 0, 0, 0)), 4); } + WindowBackgroundAppearance::MicaBackdrop => { + // DWMSBT_MAINWINDOW => MicaBase + dwm_set_window_composition_attribute(hwnd, 2); + } + WindowBackgroundAppearance::MicaAltBackdrop => { + // DWMSBT_TABBEDWINDOW => MicaAlt + dwm_set_window_composition_attribute(hwnd, 4); + } } } @@ -782,8 +804,9 @@ impl PlatformWindow for WindowsWindow { unsafe { if IsWindowVisible(self.0.hwnd).as_bool() { ShowWindowAsync(self.0.hwnd, SW_MAXIMIZE).ok().log_err(); - } else if let Some(status) = self.0.state.borrow_mut().initial_placement.as_mut() { + } else if let Some(mut status) = self.state.initial_placement.take() { status.state = WindowOpenState::Maximized; + self.state.initial_placement.set(Some(status)); } } } @@ -791,61 +814,78 @@ impl PlatformWindow for WindowsWindow { fn toggle_fullscreen(&self) { if unsafe { IsWindowVisible(self.0.hwnd).as_bool() } { self.0.toggle_fullscreen(); - } else if let Some(status) = self.0.state.borrow_mut().initial_placement.as_mut() { + } else if let Some(mut status) = self.state.initial_placement.take() { status.state = WindowOpenState::Fullscreen; + self.state.initial_placement.set(Some(status)); } } fn is_fullscreen(&self) -> bool { - self.0.state.borrow().is_fullscreen() + self.state.is_fullscreen() } fn on_request_frame(&self, callback: Box) { - 
self.0.state.borrow_mut().callbacks.request_frame = Some(callback); + self.state.callbacks.request_frame.set(Some(callback)); } fn on_input(&self, callback: Box DispatchEventResult>) { - self.0.state.borrow_mut().callbacks.input = Some(callback); + self.state.callbacks.input.set(Some(callback)); } fn on_active_status_change(&self, callback: Box) { - self.0.state.borrow_mut().callbacks.active_status_change = Some(callback); + self.0 + .state + .callbacks + .active_status_change + .set(Some(callback)); } fn on_hover_status_change(&self, callback: Box) { - self.0.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + self.0 + .state + .callbacks + .hovered_status_change + .set(Some(callback)); } fn on_resize(&self, callback: Box, f32)>) { - self.0.state.borrow_mut().callbacks.resize = Some(callback); + self.state.callbacks.resize.set(Some(callback)); } fn on_moved(&self, callback: Box) { - self.0.state.borrow_mut().callbacks.moved = Some(callback); + self.state.callbacks.moved.set(Some(callback)); } fn on_should_close(&self, callback: Box bool>) { - self.0.state.borrow_mut().callbacks.should_close = Some(callback); + self.state.callbacks.should_close.set(Some(callback)); } fn on_close(&self, callback: Box) { - self.0.state.borrow_mut().callbacks.close = Some(callback); + self.state.callbacks.close.set(Some(callback)); } fn on_hit_test_window_control(&self, callback: Box Option>) { - self.0.state.borrow_mut().callbacks.hit_test_window_control = Some(callback); + self.0 + .state + .callbacks + .hit_test_window_control + .set(Some(callback)); } fn on_appearance_changed(&self, callback: Box) { - self.0.state.borrow_mut().callbacks.appearance_changed = Some(callback); + self.0 + .state + .callbacks + .appearance_changed + .set(Some(callback)); } fn draw(&self, scene: &Scene) { - self.0.state.borrow_mut().renderer.draw(scene).log_err(); + self.state.renderer.borrow_mut().draw(scene).log_err(); } fn sprite_atlas(&self) -> Arc { - self.0.state.borrow().renderer.sprite_atlas() + self.state.renderer.borrow().sprite_atlas() } fn get_raw_handle(&self) -> HWND { @@ -853,7 +893,7 @@ impl PlatformWindow for WindowsWindow { } fn gpu_specs(&self) -> Option { - self.0.state.borrow().renderer.gpu_specs().log_err() + self.state.renderer.borrow().gpu_specs().log_err() } fn update_ime_position(&self, _bounds: Bounds) { @@ -866,11 +906,9 @@ struct WindowsDragDropHandler(pub Rc); impl WindowsDragDropHandler { fn handle_drag_drop(&self, input: PlatformInput) { - let mut lock = self.0.state.borrow_mut(); - if let Some(mut func) = lock.callbacks.input.take() { - drop(lock); + if let Some(mut func) = self.0.state.callbacks.input.take() { func(input); - self.0.state.borrow_mut().callbacks.input = Some(func); + self.0.state.callbacks.input.set(Some(func)); } } } @@ -914,7 +952,7 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl { ScreenToClient(self.0.hwnd, &mut cursor_position) .ok() .log_err(); - let scale_factor = self.0.state.borrow().scale_factor; + let scale_factor = self.0.state.scale_factor.get(); let input = PlatformInput::FileDrop(FileDropEvent::Entered { position: logical_point( cursor_position.x as f32, @@ -952,7 +990,7 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl { .ok() .log_err(); } - let scale_factor = self.0.state.borrow().scale_factor; + let scale_factor = self.0.state.scale_factor.get(); let input = PlatformInput::FileDrop(FileDropEvent::Pending { position: logical_point( cursor_position.x as f32, @@ -994,7 +1032,7 @@ impl IDropTarget_Impl for 
WindowsDragDropHandler_Impl { .ok() .log_err(); } - let scale_factor = self.0.state.borrow().scale_factor; + let scale_factor = self.0.state.scale_factor.get(); let input = PlatformInput::FileDrop(FileDropEvent::Submit { position: logical_point( cursor_position.x as f32, @@ -1008,15 +1046,15 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl { } } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub(crate) struct ClickState { - button: MouseButton, - last_click: Instant, - last_position: Point, - double_click_spatial_tolerance_width: i32, - double_click_spatial_tolerance_height: i32, - double_click_interval: Duration, - pub(crate) current_count: usize, + button: Cell, + last_click: Cell, + last_position: Cell>, + double_click_spatial_tolerance_width: Cell, + double_click_spatial_tolerance_height: Cell, + double_click_interval: Cell, + pub(crate) current_count: Cell, } impl ClickState { @@ -1026,61 +1064,59 @@ impl ClickState { let double_click_interval = Duration::from_millis(unsafe { GetDoubleClickTime() } as u64); ClickState { - button: MouseButton::Left, - last_click: Instant::now(), - last_position: Point::default(), - double_click_spatial_tolerance_width, - double_click_spatial_tolerance_height, - double_click_interval, - current_count: 0, + button: Cell::new(MouseButton::Left), + last_click: Cell::new(Instant::now()), + last_position: Cell::new(Point::default()), + double_click_spatial_tolerance_width: Cell::new(double_click_spatial_tolerance_width), + double_click_spatial_tolerance_height: Cell::new(double_click_spatial_tolerance_height), + double_click_interval: Cell::new(double_click_interval), + current_count: Cell::new(0), } } /// update self and return the needed click count - pub fn update(&mut self, button: MouseButton, new_position: Point) -> usize { - if self.button == button && self.is_double_click(new_position) { - self.current_count += 1; + pub fn update(&self, button: MouseButton, new_position: Point) -> usize { + if self.button.get() == button && self.is_double_click(new_position) { + self.current_count.update(|it| it + 1); } else { - self.current_count = 1; + self.current_count.set(1); } - self.last_click = Instant::now(); - self.last_position = new_position; - self.button = button; + self.last_click.set(Instant::now()); + self.last_position.set(new_position); + self.button.set(button); - self.current_count + self.current_count.get() } - pub fn system_update(&mut self, wparam: usize) { + pub fn system_update(&self, wparam: usize) { match wparam { // SPI_SETDOUBLECLKWIDTH - 29 => { - self.double_click_spatial_tolerance_width = - unsafe { GetSystemMetrics(SM_CXDOUBLECLK) } - } + 29 => self + .double_click_spatial_tolerance_width + .set(unsafe { GetSystemMetrics(SM_CXDOUBLECLK) }), // SPI_SETDOUBLECLKHEIGHT - 30 => { - self.double_click_spatial_tolerance_height = - unsafe { GetSystemMetrics(SM_CYDOUBLECLK) } - } + 30 => self + .double_click_spatial_tolerance_height + .set(unsafe { GetSystemMetrics(SM_CYDOUBLECLK) }), // SPI_SETDOUBLECLICKTIME - 32 => { - self.double_click_interval = - Duration::from_millis(unsafe { GetDoubleClickTime() } as u64) - } + 32 => self + .double_click_interval + .set(Duration::from_millis(unsafe { GetDoubleClickTime() } as u64)), _ => {} } } #[inline] fn is_double_click(&self, new_position: Point) -> bool { - let diff = self.last_position - new_position; + let diff = self.last_position.get() - new_position; - self.last_click.elapsed() < self.double_click_interval - && diff.x.0.abs() <= self.double_click_spatial_tolerance_width - && 
diff.y.0.abs() <= self.double_click_spatial_tolerance_height + self.last_click.get().elapsed() < self.double_click_interval.get() + && diff.x.0.abs() <= self.double_click_spatial_tolerance_width.get() + && diff.y.0.abs() <= self.double_click_spatial_tolerance_height.get() } } +#[derive(Copy, Clone)] struct StyleAndBounds { style: WINDOW_STYLE, x: i32, @@ -1106,14 +1142,14 @@ struct AccentPolicy { type Color = (u8, u8, u8, u8); -#[derive(Debug, Default, Clone, Copy)] +#[derive(Debug, Default, Clone)] pub(crate) struct WindowBorderOffset { - pub(crate) width_offset: i32, - pub(crate) height_offset: i32, + pub(crate) width_offset: Cell, + pub(crate) height_offset: Cell, } impl WindowBorderOffset { - pub(crate) fn update(&mut self, hwnd: HWND) -> anyhow::Result<()> { + pub(crate) fn update(&self, hwnd: HWND) -> anyhow::Result<()> { let window_rect = unsafe { let mut rect = std::mem::zeroed(); GetWindowRect(hwnd, &mut rect)?; @@ -1124,19 +1160,21 @@ impl WindowBorderOffset { GetClientRect(hwnd, &mut rect)?; rect }; - self.width_offset = - (window_rect.right - window_rect.left) - (client_rect.right - client_rect.left); - self.height_offset = - (window_rect.bottom - window_rect.top) - (client_rect.bottom - client_rect.top); + self.width_offset + .set((window_rect.right - window_rect.left) - (client_rect.right - client_rect.left)); + self.height_offset + .set((window_rect.bottom - window_rect.top) - (client_rect.bottom - client_rect.top)); Ok(()) } } +#[derive(Clone)] struct WindowOpenStatus { placement: WINDOWPLACEMENT, state: WindowOpenState, } +#[derive(Clone, Copy)] enum WindowOpenState { Maximized, Fullscreen, @@ -1246,7 +1284,7 @@ fn register_drag_drop(window: &Rc) -> Result<()> { Ok(()) } -fn calculate_window_rect(bounds: Bounds, border_offset: WindowBorderOffset) -> RECT { +fn calculate_window_rect(bounds: Bounds, border_offset: &WindowBorderOffset) -> RECT { // NOTE: // The reason we're not using `AdjustWindowRectEx()` here is // that the size reported by this function is incorrect. 
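// Illustrative sketch (not part of the patch): the hunks above replace the single
// `RefCell<WindowsWindowState>` with per-field `Cell`s, so methods such as
// `ClickState::update` can take `&self` instead of `&mut self` and no RefCell
// borrow has to be held across window callbacks. A minimal standalone version of
// that pattern, using only std and the hypothetical names `Counter`/`bump`:
use std::cell::Cell;
use std::time::{Duration, Instant};

struct Counter {
    count: Cell<usize>,
    last_click: Cell<Instant>,
    interval: Cell<Duration>,
}

impl Counter {
    // `&self` is enough: every field is `Copy`, so it is read with `Cell::get`
    // and written back with `Cell::set` without borrowing the whole struct.
    fn bump(&self) -> usize {
        if self.last_click.get().elapsed() < self.interval.get() {
            self.count.set(self.count.get() + 1);
        } else {
            self.count.set(1);
        }
        self.last_click.set(Instant::now());
        self.count.get()
    }
}

fn main() {
    let counter = Counter {
        count: Cell::new(0),
        last_click: Cell::new(Instant::now()),
        interval: Cell::new(Duration::from_millis(500)),
    };
    // Two calls inside the interval accumulate, analogous to the click count above.
    assert_eq!(counter.bump(), 1);
    assert_eq!(counter.bump(), 2);
}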
@@ -1260,10 +1298,10 @@ fn calculate_window_rect(bounds: Bounds, border_offset: WindowBord right: bounds.right().0, bottom: bounds.bottom().0, }; - let left_offset = border_offset.width_offset / 2; - let top_offset = border_offset.height_offset / 2; - let right_offset = border_offset.width_offset - left_offset; - let bottom_offset = border_offset.height_offset - top_offset; + let left_offset = border_offset.width_offset.get() / 2; + let top_offset = border_offset.height_offset.get() / 2; + let right_offset = border_offset.width_offset.get() - left_offset; + let bottom_offset = border_offset.height_offset.get() - top_offset; rect.left -= left_offset; rect.top -= top_offset; rect.right += right_offset; @@ -1273,13 +1311,13 @@ fn calculate_window_rect(bounds: Bounds, border_offset: WindowBord fn calculate_client_rect( rect: RECT, - border_offset: WindowBorderOffset, + border_offset: &WindowBorderOffset, scale_factor: f32, ) -> Bounds { - let left_offset = border_offset.width_offset / 2; - let top_offset = border_offset.height_offset / 2; - let right_offset = border_offset.width_offset - left_offset; - let bottom_offset = border_offset.height_offset - top_offset; + let left_offset = border_offset.width_offset.get() / 2; + let top_offset = border_offset.height_offset.get() / 2; + let right_offset = border_offset.width_offset.get() - left_offset; + let bottom_offset = border_offset.height_offset.get() - top_offset; let left = rect.left + left_offset; let top = rect.top + top_offset; let right = rect.right - right_offset; @@ -1296,7 +1334,7 @@ fn retrieve_window_placement( display: WindowsDisplay, initial_bounds: Bounds, scale_factor: f32, - border_offset: WindowBorderOffset, + border_offset: &WindowBorderOffset, ) -> Result { let mut placement = WINDOWPLACEMENT { length: std::mem::size_of::() as u32, @@ -1314,9 +1352,34 @@ fn retrieve_window_placement( Ok(placement) } +fn dwm_set_window_composition_attribute(hwnd: HWND, backdrop_type: u32) { + let mut version = unsafe { std::mem::zeroed() }; + let status = unsafe { windows::Wdk::System::SystemServices::RtlGetVersion(&mut version) }; + + // DWMWA_SYSTEMBACKDROP_TYPE is available only on version 22621 or later + // using SetWindowCompositionAttributeType as a fallback + if !status.is_ok() || version.dwBuildNumber < 22621 { + return; + } + + unsafe { + let result = DwmSetWindowAttribute( + hwnd, + DWMWA_SYSTEMBACKDROP_TYPE, + &backdrop_type as *const _ as *const _, + std::mem::size_of_val(&backdrop_type) as u32, + ); + + if !result.is_ok() { + return; + } + } +} + fn set_window_composition_attribute(hwnd: HWND, color: Option, state: u32) { let mut version = unsafe { std::mem::zeroed() }; let status = unsafe { windows::Wdk::System::SystemServices::RtlGetVersion(&mut version) }; + if !status.is_ok() || version.dwBuildNumber < 17763 { return; } @@ -1381,7 +1444,9 @@ mod tests { state.update(MouseButton::Left, point(DevicePixels(0), DevicePixels(0))), 2 ); - state.last_click -= Duration::from_millis(700); + state + .last_click + .update(|it| it - Duration::from_millis(700)); assert_eq!( state.update(MouseButton::Left, point(DevicePixels(0), DevicePixels(0))), 1 diff --git a/crates/gpui/src/profiler.rs b/crates/gpui/src/profiler.rs new file mode 100644 index 0000000000000000000000000000000000000000..73f435d7e798c78d6c7320a49da804ebe703c434 --- /dev/null +++ b/crates/gpui/src/profiler.rs @@ -0,0 +1,234 @@ +use std::{ + cell::LazyCell, + hash::Hasher, + hash::{DefaultHasher, Hash}, + sync::Arc, + thread::ThreadId, + time::Instant, +}; + +use 
serde::{Deserialize, Serialize}; + +#[doc(hidden)] +#[derive(Debug, Copy, Clone)] +pub struct TaskTiming { + pub location: &'static core::panic::Location<'static>, + pub start: Instant, + pub end: Option, +} + +#[doc(hidden)] +#[derive(Debug, Clone)] +pub struct ThreadTaskTimings { + pub thread_name: Option, + pub thread_id: ThreadId, + pub timings: Vec, +} + +impl ThreadTaskTimings { + pub(crate) fn convert(timings: &[GlobalThreadTimings]) -> Vec { + timings + .iter() + .filter_map(|t| match t.timings.upgrade() { + Some(timings) => Some((t.thread_id, timings)), + _ => None, + }) + .map(|(thread_id, timings)| { + let timings = timings.lock(); + let thread_name = timings.thread_name.clone(); + let timings = &timings.timings; + + let mut vec = Vec::with_capacity(timings.len()); + + let (s1, s2) = timings.as_slices(); + vec.extend_from_slice(s1); + vec.extend_from_slice(s2); + + ThreadTaskTimings { + thread_name, + thread_id, + timings: vec, + } + }) + .collect() + } +} + +/// Serializable variant of [`core::panic::Location`] +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub struct SerializedLocation<'a> { + /// Name of the source file + pub file: &'a str, + /// Line in the source file + pub line: u32, + /// Column in the source file + pub column: u32, +} + +impl<'a> From<&'a core::panic::Location<'a>> for SerializedLocation<'a> { + fn from(value: &'a core::panic::Location<'a>) -> Self { + SerializedLocation { + file: value.file(), + line: value.line(), + column: value.column(), + } + } +} + +/// Serializable variant of [`TaskTiming`] +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SerializedTaskTiming<'a> { + /// Location of the timing + #[serde(borrow)] + pub location: SerializedLocation<'a>, + /// Time at which the measurement was reported in nanoseconds + pub start: u128, + /// Duration of the measurement in nanoseconds + pub duration: u128, +} + +impl<'a> SerializedTaskTiming<'a> { + /// Convert an array of [`TaskTiming`] into their serializable format + /// + /// # Params + /// + /// `anchor` - [`Instant`] that should be earlier than all timings to use as base anchor + pub fn convert(anchor: Instant, timings: &[TaskTiming]) -> Vec> { + let serialized = timings + .iter() + .map(|timing| { + let start = timing.start.duration_since(anchor).as_nanos(); + let duration = timing + .end + .unwrap_or_else(|| Instant::now()) + .duration_since(timing.start) + .as_nanos(); + SerializedTaskTiming { + location: timing.location.into(), + start, + duration, + } + }) + .collect::>(); + + serialized + } +} + +/// Serializable variant of [`ThreadTaskTimings`] +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SerializedThreadTaskTimings<'a> { + /// Thread name + pub thread_name: Option, + /// Hash of the thread id + pub thread_id: u64, + /// Timing records for this thread + #[serde(borrow)] + pub timings: Vec>, +} + +impl<'a> SerializedThreadTaskTimings<'a> { + /// Convert [`ThreadTaskTimings`] into their serializable format + /// + /// # Params + /// + /// `anchor` - [`Instant`] that should be earlier than all timings to use as base anchor + pub fn convert( + anchor: Instant, + timings: ThreadTaskTimings, + ) -> SerializedThreadTaskTimings<'static> { + let serialized_timings = SerializedTaskTiming::convert(anchor, &timings.timings); + + let mut hasher = DefaultHasher::new(); + timings.thread_id.hash(&mut hasher); + let thread_id = hasher.finish(); + + SerializedThreadTaskTimings { + thread_name: timings.thread_name, + thread_id, + timings: serialized_timings, + } + } +} + 
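// Illustrative sketch (not part of the patch): `SerializedTaskTiming::convert`
// above turns `Instant`-based timings into plain nanosecond offsets relative to
// an `anchor` taken before any measurement, since `Instant` itself cannot be
// serialized. A minimal standalone version of that conversion, with the
// hypothetical names `RawTiming`/`FlatTiming`/`flatten`:
use std::time::{Duration, Instant};

struct RawTiming {
    start: Instant,
    end: Option<Instant>,
}

struct FlatTiming {
    start_ns: u128,
    duration_ns: u128,
}

fn flatten(anchor: Instant, timings: &[RawTiming]) -> Vec<FlatTiming> {
    timings
        .iter()
        .map(|t| FlatTiming {
            // offset from the anchor; the anchor must not be later than `t.start`
            start_ns: t.start.duration_since(anchor).as_nanos(),
            // a still-running task (end == None) is measured up to "now"
            duration_ns: t
                .end
                .unwrap_or_else(Instant::now)
                .duration_since(t.start)
                .as_nanos(),
        })
        .collect()
}

fn main() {
    let anchor = Instant::now();
    let start = Instant::now();
    std::thread::sleep(Duration::from_millis(1));
    let raw = [RawTiming { start, end: Some(Instant::now()) }];
    let flat = flatten(anchor, &raw);
    assert!(flat[0].duration_ns >= 1_000_000); // at least ~1ms expressed in nanoseconds
}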
+// Allow 20mb of task timing entries +const MAX_TASK_TIMINGS: usize = (20 * 1024 * 1024) / core::mem::size_of::(); + +pub(crate) type TaskTimings = circular_buffer::CircularBuffer; +pub(crate) type GuardedTaskTimings = spin::Mutex; + +pub(crate) struct GlobalThreadTimings { + pub thread_id: ThreadId, + pub timings: std::sync::Weak, +} + +pub(crate) static GLOBAL_THREAD_TIMINGS: spin::Mutex> = + spin::Mutex::new(Vec::new()); + +thread_local! { + pub(crate) static THREAD_TIMINGS: LazyCell> = LazyCell::new(|| { + let current_thread = std::thread::current(); + let thread_name = current_thread.name(); + let thread_id = current_thread.id(); + let timings = ThreadTimings::new(thread_name.map(|e| e.to_string()), thread_id); + let timings = Arc::new(spin::Mutex::new(timings)); + + { + let timings = Arc::downgrade(&timings); + let global_timings = GlobalThreadTimings { + thread_id: std::thread::current().id(), + timings, + }; + GLOBAL_THREAD_TIMINGS.lock().push(global_timings); + } + + timings + }); +} + +pub(crate) struct ThreadTimings { + pub thread_name: Option, + pub thread_id: ThreadId, + pub timings: Box, +} + +impl ThreadTimings { + pub(crate) fn new(thread_name: Option, thread_id: ThreadId) -> Self { + ThreadTimings { + thread_name, + thread_id, + timings: TaskTimings::boxed(), + } + } +} + +impl Drop for ThreadTimings { + fn drop(&mut self) { + let mut thread_timings = GLOBAL_THREAD_TIMINGS.lock(); + + let Some((index, _)) = thread_timings + .iter() + .enumerate() + .find(|(_, t)| t.thread_id == self.thread_id) + else { + return; + }; + thread_timings.swap_remove(index); + } +} + +pub(crate) fn add_task_timing(timing: TaskTiming) { + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + + if let Some(last_timing) = timings.iter_mut().rev().next() { + if last_timing.location == timing.location { + last_timing.end = timing.end; + return; + } + } + + timings.push_back(timing); + }); +} diff --git a/crates/gpui/src/queue.rs b/crates/gpui/src/queue.rs new file mode 100644 index 0000000000000000000000000000000000000000..9e9da710977ee80df1853791918eebe5e7f01096 --- /dev/null +++ b/crates/gpui/src/queue.rs @@ -0,0 +1,328 @@ +use std::{ + fmt, + iter::FusedIterator, + sync::{Arc, atomic::AtomicUsize}, +}; + +use rand::{Rng, SeedableRng, rngs::SmallRng}; + +use crate::Priority; + +struct PriorityQueues { + high_priority: Vec, + medium_priority: Vec, + low_priority: Vec, +} + +impl PriorityQueues { + fn is_empty(&self) -> bool { + self.high_priority.is_empty() + && self.medium_priority.is_empty() + && self.low_priority.is_empty() + } +} + +struct PriorityQueueState { + queues: parking_lot::Mutex>, + condvar: parking_lot::Condvar, + receiver_count: AtomicUsize, + sender_count: AtomicUsize, +} + +impl PriorityQueueState { + fn send(&self, priority: Priority, item: T) -> Result<(), SendError> { + if self + .receiver_count + .load(std::sync::atomic::Ordering::Relaxed) + == 0 + { + return Err(SendError(item)); + } + + let mut queues = self.queues.lock(); + match priority { + Priority::Realtime(_) => unreachable!(), + Priority::High => queues.high_priority.push(item), + Priority::Medium => queues.medium_priority.push(item), + Priority::Low => queues.low_priority.push(item), + }; + self.condvar.notify_one(); + Ok(()) + } + + fn recv<'a>(&'a self) -> Result>, RecvError> { + let mut queues = self.queues.lock(); + + let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed); + if queues.is_empty() && sender_count == 0 { + return 
Err(crate::queue::RecvError); + } + + while queues.is_empty() { + self.condvar.wait(&mut queues); + } + + Ok(queues) + } + + fn try_recv<'a>( + &'a self, + ) -> Result>>, RecvError> { + let mut queues = self.queues.lock(); + + let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed); + if queues.is_empty() && sender_count == 0 { + return Err(crate::queue::RecvError); + } + + if queues.is_empty() { + Ok(None) + } else { + Ok(Some(queues)) + } + } +} + +pub(crate) struct PriorityQueueSender { + state: Arc>, +} + +impl PriorityQueueSender { + fn new(state: Arc>) -> Self { + Self { state } + } + + pub(crate) fn send(&self, priority: Priority, item: T) -> Result<(), SendError> { + self.state.send(priority, item)?; + Ok(()) + } +} + +impl Drop for PriorityQueueSender { + fn drop(&mut self) { + self.state + .sender_count + .fetch_sub(1, std::sync::atomic::Ordering::AcqRel); + } +} + +pub(crate) struct PriorityQueueReceiver { + state: Arc>, + rand: SmallRng, + disconnected: bool, +} + +impl Clone for PriorityQueueReceiver { + fn clone(&self) -> Self { + self.state + .receiver_count + .fetch_add(1, std::sync::atomic::Ordering::AcqRel); + Self { + state: Arc::clone(&self.state), + rand: SmallRng::seed_from_u64(0), + disconnected: self.disconnected, + } + } +} + +pub(crate) struct SendError(T); + +impl fmt::Debug for SendError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("SendError").field(&self.0).finish() + } +} + +#[derive(Debug)] +pub(crate) struct RecvError; + +#[allow(dead_code)] +impl PriorityQueueReceiver { + pub(crate) fn new() -> (PriorityQueueSender, Self) { + let state = PriorityQueueState { + queues: parking_lot::Mutex::new(PriorityQueues { + high_priority: Vec::new(), + medium_priority: Vec::new(), + low_priority: Vec::new(), + }), + condvar: parking_lot::Condvar::new(), + receiver_count: AtomicUsize::new(1), + sender_count: AtomicUsize::new(1), + }; + let state = Arc::new(state); + + let sender = PriorityQueueSender::new(Arc::clone(&state)); + + let receiver = PriorityQueueReceiver { + state, + rand: SmallRng::seed_from_u64(0), + disconnected: false, + }; + + (sender, receiver) + } + + /// Tries to pop one element from the priority queue without blocking. + /// + /// This will early return if there are no elements in the queue. + /// + /// This method is best suited if you only intend to pop one element, for better performance + /// on large queues see [`Self::try_iter`] + /// + /// # Errors + /// + /// If the sender was dropped + pub(crate) fn try_pop(&mut self) -> Result, RecvError> { + self.pop_inner(false) + } + + /// Pops an element from the priority queue blocking if necessary. + /// + /// This method is best suited if you only intend to pop one element, for better performance + /// on large queues see [`Self::iter``] + /// + /// # Errors + /// + /// If the sender was dropped + pub(crate) fn pop(&mut self) -> Result { + self.pop_inner(true).map(|e| e.unwrap()) + } + + /// Returns an iterator over the elements of the queue + /// this iterator will end when all elements have been consumed and will not wait for new ones. + pub(crate) fn try_iter(self) -> TryIter { + TryIter { + receiver: self, + ended: false, + } + } + + /// Returns an iterator over the elements of the queue + /// this iterator will wait for new elements if the queue is empty. 
+ pub(crate) fn iter(self) -> Iter { + Iter(self) + } + + #[inline(always)] + // algorithm is the loaded die from biased coin from + // https://www.keithschwarz.com/darts-dice-coins/ + fn pop_inner(&mut self, block: bool) -> Result, RecvError> { + use Priority as P; + + let mut queues = if !block { + let Some(queues) = self.state.try_recv()? else { + return Ok(None); + }; + queues + } else { + self.state.recv()? + }; + + let high = P::High.probability() * !queues.high_priority.is_empty() as u32; + let medium = P::Medium.probability() * !queues.medium_priority.is_empty() as u32; + let low = P::Low.probability() * !queues.low_priority.is_empty() as u32; + let mut mass = high + medium + low; //% + + if !queues.high_priority.is_empty() { + let flip = self.rand.random_ratio(P::High.probability(), mass); + if flip { + return Ok(queues.high_priority.pop()); + } + mass -= P::High.probability(); + } + + if !queues.medium_priority.is_empty() { + let flip = self.rand.random_ratio(P::Medium.probability(), mass); + if flip { + return Ok(queues.medium_priority.pop()); + } + mass -= P::Medium.probability(); + } + + if !queues.low_priority.is_empty() { + let flip = self.rand.random_ratio(P::Low.probability(), mass); + if flip { + return Ok(queues.low_priority.pop()); + } + } + + Ok(None) + } +} + +impl Drop for PriorityQueueReceiver { + fn drop(&mut self) { + self.state + .receiver_count + .fetch_sub(1, std::sync::atomic::Ordering::AcqRel); + } +} + +/// If None is returned the sender disconnected +pub(crate) struct Iter(PriorityQueueReceiver); +impl Iterator for Iter { + type Item = T; + + fn next(&mut self) -> Option { + self.0.pop().ok() + } +} +impl FusedIterator for Iter {} + +/// If None is returned there are no more elements in the queue +pub(crate) struct TryIter { + receiver: PriorityQueueReceiver, + ended: bool, +} +impl Iterator for TryIter { + type Item = Result; + + fn next(&mut self) -> Option { + if self.ended { + return None; + } + + let res = self.receiver.try_pop(); + self.ended = res.is_err(); + + res.transpose() + } +} +impl FusedIterator for TryIter {} + +#[cfg(test)] +mod tests { + use collections::HashSet; + + use super::*; + + #[test] + fn all_tasks_get_yielded() { + let (tx, mut rx) = PriorityQueueReceiver::new(); + tx.send(Priority::Medium, 20).unwrap(); + tx.send(Priority::High, 30).unwrap(); + tx.send(Priority::Low, 10).unwrap(); + tx.send(Priority::Medium, 21).unwrap(); + tx.send(Priority::High, 31).unwrap(); + + drop(tx); + + assert_eq!( + rx.iter().collect::>(), + [30, 31, 20, 21, 10].into_iter().collect::>() + ) + } + + #[test] + fn new_high_prio_task_get_scheduled_quickly() { + let (tx, mut rx) = PriorityQueueReceiver::new(); + for _ in 0..100 { + tx.send(Priority::Low, 1).unwrap(); + } + + assert_eq!(rx.pop().unwrap(), 1); + tx.send(Priority::High, 3).unwrap(); + assert_eq!(rx.pop().unwrap(), 3); + assert_eq!(rx.pop().unwrap(), 1); + } +} diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index 42f8f25e47620fe673720055037b7f91f44165a2..4d6e6f490d81d967692a3e9d8316af75a7a4d306 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -252,6 +252,7 @@ pub struct Style { pub box_shadow: Vec, /// The text style of this element + #[refineable] pub text: TextStyleRefinement, /// The mouse cursor style shown when the mouse pointer is over an element. @@ -264,6 +265,10 @@ pub struct Style { /// Equivalent to the Tailwind `grid-cols-` pub grid_cols: Option, + /// The grid columns with min-content minimum sizing. 
+ /// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints. + pub grid_cols_min_content: Option, + /// The row span of this element /// Equivalent to the Tailwind `grid-rows-` pub grid_rows: Option, @@ -771,6 +776,7 @@ impl Default for Style { opacity: None, grid_rows: None, grid_cols: None, + grid_cols_min_content: None, grid_location: None, #[cfg(debug_assertions)] @@ -1469,4 +1475,21 @@ mod tests { ] ); } + + #[perf] + fn test_text_style_refinement() { + let mut style = Style::default(); + style.refine(&StyleRefinement::default().text_size(px(20.0))); + style.refine(&StyleRefinement::default().font_weight(FontWeight::SEMIBOLD)); + + assert_eq!( + Some(AbsoluteLength::from(px(20.0))), + style.text_style().unwrap().font_size + ); + + assert_eq!( + Some(FontWeight::SEMIBOLD), + style.text_style().unwrap().font_weight + ); + } } diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index f653f050c07848340c8da1fa0e01a2a4da985bdb..e8088a84d7fc141d0a320988c6399afe2b93ce07 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -1,8 +1,9 @@ use crate::{ self as gpui, AbsoluteLength, AlignContent, AlignItems, BorderStyle, CursorStyle, - DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontStyle, FontWeight, - GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle, StyleRefinement, - TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems, + DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontFeatures, FontStyle, + FontWeight, GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle, + StyleRefinement, TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, + relative, rems, }; pub use gpui_macros::{ border_style_methods, box_shadow_style_methods, cursor_style_methods, margin_style_methods, @@ -13,8 +14,9 @@ const ELLIPSIS: SharedString = SharedString::new_static("…"); /// A trait for elements that can be styled. /// Use this to opt-in to a utility CSS-like styling API. +// gate on rust-analyzer so rust-analyzer never needs to expand this macro, it takes up to 10 seconds to expand due to inefficiencies in rust-analyzers proc-macro srv #[cfg_attr( - any(feature = "inspector", debug_assertions), + all(any(feature = "inspector", debug_assertions), not(rust_analyzer)), gpui_macros::derive_inspector_reflection )] pub trait Styled: Sized { @@ -62,43 +64,33 @@ pub trait Styled: Sized { /// Sets the whitespace of the element to `normal`. /// [Docs](https://tailwindcss.com/docs/whitespace#normal) fn whitespace_normal(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .white_space = Some(WhiteSpace::Normal); + self.text_style().white_space = Some(WhiteSpace::Normal); self } /// Sets the whitespace of the element to `nowrap`. /// [Docs](https://tailwindcss.com/docs/whitespace#nowrap) fn whitespace_nowrap(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .white_space = Some(WhiteSpace::Nowrap); + self.text_style().white_space = Some(WhiteSpace::Nowrap); self } /// Sets the truncate overflowing text with an ellipsis (…) if needed. 
/// [Docs](https://tailwindcss.com/docs/text-overflow#ellipsis) fn text_ellipsis(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .text_overflow = Some(TextOverflow::Truncate(ELLIPSIS)); + self.text_style().text_overflow = Some(TextOverflow::Truncate(ELLIPSIS)); self } /// Sets the text overflow behavior of the element. fn text_overflow(mut self, overflow: TextOverflow) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .text_overflow = Some(overflow); + self.text_style().text_overflow = Some(overflow); self } /// Set the text alignment of the element. fn text_align(mut self, align: TextAlign) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .text_align = Some(align); + self.text_style().text_align = Some(align); self } @@ -126,7 +118,7 @@ pub trait Styled: Sized { /// Sets number of lines to show before truncating the text. /// [Docs](https://tailwindcss.com/docs/line-clamp) fn line_clamp(mut self, lines: usize) -> Self { - let mut text_style = self.text_style().get_or_insert_with(Default::default); + let mut text_style = self.text_style(); text_style.line_clamp = Some(lines); self.overflow_hidden() } @@ -394,7 +386,7 @@ pub trait Styled: Sized { } /// Returns a mutable reference to the text style that has been configured on this element. - fn text_style(&mut self) -> &mut Option { + fn text_style(&mut self) -> &mut TextStyleRefinement { let style: &mut StyleRefinement = self.style(); &mut style.text } @@ -403,7 +395,7 @@ pub trait Styled: Sized { /// /// This value cascades to its child elements. fn text_color(mut self, color: impl Into) -> Self { - self.text_style().get_or_insert_with(Default::default).color = Some(color.into()); + self.text_style().color = Some(color.into()); self } @@ -411,9 +403,7 @@ pub trait Styled: Sized { /// /// This value cascades to its child elements. fn font_weight(mut self, weight: FontWeight) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_weight = Some(weight); + self.text_style().font_weight = Some(weight); self } @@ -421,9 +411,7 @@ pub trait Styled: Sized { /// /// This value cascades to its child elements. fn text_bg(mut self, bg: impl Into) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .background_color = Some(bg.into()); + self.text_style().background_color = Some(bg.into()); self } @@ -431,97 +419,77 @@ pub trait Styled: Sized { /// /// This value cascades to its child elements. fn text_size(mut self, size: impl Into) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(size.into()); + self.text_style().font_size = Some(size.into()); self } /// Sets the text size to 'extra small'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_xs(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(0.75).into()); + self.text_style().font_size = Some(rems(0.75).into()); self } /// Sets the text size to 'small'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_sm(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(0.875).into()); + self.text_style().font_size = Some(rems(0.875).into()); self } /// Sets the text size to 'base'. 
/// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_base(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(1.0).into()); + self.text_style().font_size = Some(rems(1.0).into()); self } /// Sets the text size to 'large'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_lg(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(1.125).into()); + self.text_style().font_size = Some(rems(1.125).into()); self } /// Sets the text size to 'extra large'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_xl(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(1.25).into()); + self.text_style().font_size = Some(rems(1.25).into()); self } /// Sets the text size to 'extra extra large'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_2xl(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(1.5).into()); + self.text_style().font_size = Some(rems(1.5).into()); self } /// Sets the text size to 'extra extra extra large'. /// [Docs](https://tailwindcss.com/docs/font-size#setting-the-font-size) fn text_3xl(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_size = Some(rems(1.875).into()); + self.text_style().font_size = Some(rems(1.875).into()); self } /// Sets the font style of the element to italic. /// [Docs](https://tailwindcss.com/docs/font-style#italicizing-text) fn italic(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_style = Some(FontStyle::Italic); + self.text_style().font_style = Some(FontStyle::Italic); self } /// Sets the font style of the element to normal (not italic). /// [Docs](https://tailwindcss.com/docs/font-style#displaying-text-normally) fn not_italic(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_style = Some(FontStyle::Normal); + self.text_style().font_style = Some(FontStyle::Normal); self } /// Sets the text decoration to underline. /// [Docs](https://tailwindcss.com/docs/text-decoration-line#underling-text) fn underline(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); style.underline = Some(UnderlineStyle { thickness: px(1.), ..Default::default() @@ -532,7 +500,7 @@ pub trait Styled: Sized { /// Sets the decoration of the text to have a line through it. /// [Docs](https://tailwindcss.com/docs/text-decoration-line#adding-a-line-through-text) fn line_through(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); style.strikethrough = Some(StrikethroughStyle { thickness: px(1.), ..Default::default() @@ -544,15 +512,13 @@ pub trait Styled: Sized { /// /// This value cascades to its child elements. 
fn text_decoration_none(mut self) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .underline = None; + self.text_style().underline = None; self } /// Sets the color for the underline on this element fn text_decoration_color(mut self, color: impl Into) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.color = Some(color.into()); self @@ -561,7 +527,7 @@ pub trait Styled: Sized { /// Sets the text decoration style to a solid line. /// [Docs](https://tailwindcss.com/docs/text-decoration-style) fn text_decoration_solid(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.wavy = false; self @@ -570,7 +536,7 @@ pub trait Styled: Sized { /// Sets the text decoration style to a wavy line. /// [Docs](https://tailwindcss.com/docs/text-decoration-style) fn text_decoration_wavy(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.wavy = true; self @@ -579,7 +545,7 @@ pub trait Styled: Sized { /// Sets the text decoration to be 0px thick. /// [Docs](https://tailwindcss.com/docs/text-decoration-thickness) fn text_decoration_0(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.thickness = px(0.); self @@ -588,7 +554,7 @@ pub trait Styled: Sized { /// Sets the text decoration to be 1px thick. /// [Docs](https://tailwindcss.com/docs/text-decoration-thickness) fn text_decoration_1(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.thickness = px(1.); self @@ -597,7 +563,7 @@ pub trait Styled: Sized { /// Sets the text decoration to be 2px thick. /// [Docs](https://tailwindcss.com/docs/text-decoration-thickness) fn text_decoration_2(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.thickness = px(2.); self @@ -606,7 +572,7 @@ pub trait Styled: Sized { /// Sets the text decoration to be 4px thick. /// [Docs](https://tailwindcss.com/docs/text-decoration-thickness) fn text_decoration_4(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.thickness = px(4.); self @@ -615,7 +581,7 @@ pub trait Styled: Sized { /// Sets the text decoration to be 8px thick. /// [Docs](https://tailwindcss.com/docs/text-decoration-thickness) fn text_decoration_8(mut self) -> Self { - let style = self.text_style().get_or_insert_with(Default::default); + let style = self.text_style(); let underline = style.underline.get_or_insert_with(Default::default); underline.thickness = px(8.); self @@ -623,9 +589,13 @@ pub trait Styled: Sized { /// Sets the font family of this element and its children. 
fn font_family(mut self, family_name: impl Into) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .font_family = Some(family_name.into()); + self.text_style().font_family = Some(family_name.into()); + self + } + + /// Sets the font features of this element and its children. + fn font_features(mut self, features: FontFeatures) -> Self { + self.text_style().font_features = Some(features); self } @@ -639,7 +609,7 @@ pub trait Styled: Sized { style, } = font; - let text_style = self.text_style().get_or_insert_with(Default::default); + let text_style = self.text_style(); text_style.font_family = Some(family); text_style.font_features = Some(features); text_style.font_weight = Some(weight); @@ -651,9 +621,7 @@ pub trait Styled: Sized { /// Sets the line height of this element and its children. fn line_height(mut self, line_height: impl Into) -> Self { - self.text_style() - .get_or_insert_with(Default::default) - .line_height = Some(line_height.into()); + self.text_style().line_height = Some(line_height.into()); self } @@ -669,6 +637,13 @@ pub trait Styled: Sized { self } + /// Sets the grid columns with min-content minimum sizing. + /// Unlike grid_cols, it won't shrink to width 0 in AvailableSpace::MinContent constraints. + fn grid_cols_min_content(mut self, cols: u16) -> Self { + self.style().grid_cols_min_content = Some(cols); + self + } + /// Sets the grid rows of this element. fn grid_rows(mut self, rows: u16) -> Self { self.style().grid_rows = Some(rows); diff --git a/crates/gpui/src/tab_stop.rs b/crates/gpui/src/tab_stop.rs index 8a95a3975af736d544e01cbf6e212994b8e7e8c6..a2050059634d202490e7156830fbe579d48c47ab 100644 --- a/crates/gpui/src/tab_stop.rs +++ b/crates/gpui/src/tab_stop.rs @@ -320,7 +320,7 @@ mod tests { let focus_map = Arc::new(FocusMap::default()); let mut tab_index_map = TabStopMap::default(); - let focus_handles = vec![ + let focus_handles = [ FocusHandle::new(&focus_map).tab_stop(true).tab_index(0), FocusHandle::new(&focus_map).tab_stop(true).tab_index(1), FocusHandle::new(&focus_map).tab_stop(true).tab_index(1), diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index 11cb0872861321c3c06c3f8a5bf79fdd30eb2275..99a50b87c8aa9f40a7694f1c2084b10f6d0a9315 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -8,6 +8,7 @@ use std::{fmt::Debug, ops::Range}; use taffy::{ TaffyTree, TraversePartialTree as _, geometry::{Point as TaffyPoint, Rect as TaffyRect, Size as TaffySize}, + prelude::min_content, style::AvailableSpace as TaffyAvailableSpace, tree::NodeId, }; @@ -314,6 +315,14 @@ impl ToTaffy for Style { .unwrap_or_default() } + fn to_grid_repeat_min_content( + unit: &Option, + ) -> Vec> { + // grid-template-columns: repeat(, minmax(min-content, 1fr)); + unit.map(|count| vec![repeat(count, vec![minmax(min_content(), fr(1.0))])]) + .unwrap_or_default() + } + taffy::style::Style { display: self.display.into(), overflow: self.overflow.into(), @@ -338,7 +347,11 @@ impl ToTaffy for Style { flex_grow: self.flex_grow, flex_shrink: self.flex_shrink, grid_template_rows: to_grid_repeat(&self.grid_rows), - grid_template_columns: to_grid_repeat(&self.grid_cols), + grid_template_columns: if self.grid_cols_min_content.is_some() { + to_grid_repeat_min_content(&self.grid_cols_min_content) + } else { + to_grid_repeat(&self.grid_cols) + }, grid_row: self .grid_location .as_ref() diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 5ae72d2be1688893374e16a55445558b5bc33040..2a5711a01a9c8f2874cea4803fc517089cafd0fe 100644 --- 
a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -69,7 +69,10 @@ pub fn run_test( std::mem::forget(error); } else { if is_multiple_runs { - eprintln!("failing seed: {}", seed); + eprintln!("failing seed: {seed}"); + eprintln!( + "You can rerun from this seed by setting the environmental variable SEED to {seed}" + ); } if let Some(on_fail_fn) = on_fail_fn { on_fail_fn() diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 39f68e3226a81633fbd82d1eab989f3a2893d9da..070e434dc992af4ca5b28f6e55aa0aa3cb9e5790 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -550,7 +550,6 @@ impl WindowTextSystem { force_width: Option, ) -> Arc { let mut last_run = None::<&TextRun>; - let mut last_font: Option = None; let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); font_runs.clear(); @@ -568,14 +567,13 @@ impl WindowTextSystem { true }; + let font_id = self.resolve_font(&run.font); if let Some(font_run) = font_runs.last_mut() - && Some(font_run.font_id) == last_font + && font_id == font_run.font_id && !decoration_changed { font_run.len += run.len; } else { - let font_id = self.resolve_font(&run.font); - last_font = Some(font_id); font_runs.push(FontRun { len: run.len, font_id, diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index 189a3e85c6b4fed52eddb45d5fa151314830c0e9..84618eccc43dc3f189d3d49ea22b9d98f5ad9f85 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -369,16 +369,17 @@ fn paint_line( let content_mask = window.content_mask(); if max_glyph_bounds.intersects(&content_mask.bounds) { + let vertical_offset = point(px(0.0), glyph.position.y); if glyph.is_emoji { window.paint_emoji( - glyph_origin + baseline_offset, + glyph_origin + baseline_offset + vertical_offset, run.font_id, glyph.id, layout.font_size, )?; } else { window.paint_glyph( - glyph_origin + baseline_offset, + glyph_origin + baseline_offset + vertical_offset, run.font_id, glyph.id, layout.font_size, diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 61edd614d804434d414b34a9804e51b0b0148ea4..375a9bdc7bccdddb9d34409c5ced138b2d5aebd2 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -54,9 +54,25 @@ pub struct ShapedGlyph { } impl LineLayout { + /// The index for the character at the given x coordinate + pub fn index_for_x(&self, x: Pixels) -> Option { + if x >= self.width { + None + } else { + for run in self.runs.iter().rev() { + for glyph in run.glyphs.iter().rev() { + if glyph.position.x <= x { + return Some(glyph.index); + } + } + } + Some(0) + } + } + /// closest_index_for_x returns the character boundary closest to the given x coordinate /// (e.g. to handle aligning up/down arrow keys) - pub fn index_for_x(&self, x: Pixels) -> usize { + pub fn closest_index_for_x(&self, x: Pixels) -> usize { let mut prev_index = 0; let mut prev_x = px(0.); @@ -262,10 +278,34 @@ impl WrappedLineLayout { } /// The index corresponding to a given position in this layout for the given line height. + /// + /// See also [`Self::closest_index_for_position`]. pub fn index_for_position( + &self, + position: Point, + line_height: Pixels, + ) -> Result { + self._index_for_position(position, line_height, false) + } + + /// The closest index to a given position in this layout for the given line height. + /// + /// Closest means the character boundary closest to the given position. 
+ /// + /// See also [`LineLayout::closest_index_for_x`]. + pub fn closest_index_for_position( + &self, + position: Point, + line_height: Pixels, + ) -> Result { + self._index_for_position(position, line_height, true) + } + + fn _index_for_position( &self, mut position: Point, line_height: Pixels, + closest: bool, ) -> Result { let wrapped_line_ix = (position.y / line_height) as usize; @@ -305,9 +345,16 @@ impl WrappedLineLayout { } else if position_in_unwrapped_line.x >= wrapped_line_end_x { Err(wrapped_line_end_index) } else { - Ok(self - .unwrapped_layout - .index_for_x(position_in_unwrapped_line.x)) + if closest { + Ok(self + .unwrapped_layout + .closest_index_for_x(position_in_unwrapped_line.x)) + } else { + Ok(self + .unwrapped_layout + .index_for_x(position_in_unwrapped_line.x) + .unwrap()) + } } } diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 45159313b43c508029f2525234c80c6575d0f695..e4e18671a3d85c2f55abd8f8a61ec80833dabdf5 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -182,6 +182,11 @@ impl LineWrapper { // Cyrillic for Russian, Ukrainian, etc. // https://en.wikipedia.org/wiki/Cyrillic_script_in_Unicode matches!(c, '\u{0400}'..='\u{04FF}') || + + // Vietnamese (https://vietunicode.sourceforge.net/charset/) + matches!(c, '\u{1E00}'..='\u{1EFF}') || // Latin Extended Additional + matches!(c, '\u{0300}'..='\u{036F}') || // Combining Diacritical Marks + // Some other known special characters that should be treated as word characters, // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, // `2^3`, `a~b`, `a=1`, `Self::new`, etc. @@ -618,7 +623,12 @@ mod tests { #[track_caller] fn assert_word(word: &str) { for c in word.chars() { - assert!(LineWrapper::is_word_char(c), "assertion failed for '{}'", c); + assert!( + LineWrapper::is_word_char(c), + "assertion failed for '{}' (unicode 0x{:x})", + c, + c as u32 + ); } } @@ -661,6 +671,8 @@ mod tests { assert_word("ƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏ"); // Cyrillic assert_word("АБВГДЕЖЗИЙКЛМНОП"); + // Vietnamese (https://github.com/zed-industries/zed/issues/23245) + assert_word("ThậmchíđếnkhithuachạychúngcònnhẫntâmgiếtnốtsốđôngtùchínhtrịởYênBáivàCaoBằng"); // non-word characters assert_not_word("你好"); diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 51fd692f86805886529f17f03feea8bf7ff9db03..840f2223fcc4a62b6e522f38b967a3fe4ad3209e 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -9,14 +9,15 @@ use crate::{ KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId, LineLayoutIndex, Modifiers, ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, - PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, PromptButton, PromptLevel, Quad, - Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Replay, ResizeEdge, - SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, ScaledPixels, Scene, Shadow, - SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, SystemWindowTab, - SystemWindowTabController, TabStopMap, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement, - TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance, - WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem, - point, prelude::*, px, 
rems, size, transparent_black, + PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, Priority, PromptButton, + PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, + Replay, ResizeEdge, SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, + ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style, SubscriberSet, + Subscription, SystemWindowTab, SystemWindowTabController, TabStopMap, TaffyLayoutEngine, Task, + TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, + WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, + WindowOptions, WindowParams, WindowTextSystem, point, prelude::*, px, rems, size, + transparent_black, }; use anyhow::{Context as _, Result, anyhow}; use collections::{FxHashMap, FxHashSet}; @@ -344,8 +345,8 @@ impl FocusHandle { } /// Moves the focus to the element associated with this handle. - pub fn focus(&self, window: &mut Window) { - window.focus(self) + pub fn focus(&self, window: &mut Window, cx: &mut App) { + window.focus(self, cx) } /// Obtains whether the element associated with this handle is currently focused. @@ -596,7 +597,7 @@ pub enum HitboxBehavior { /// ``` /// /// This has effects beyond event handling - any use of hitbox checking, such as hover - /// styles and tooltops. These other behaviors are the main point of this mechanism. An + /// styles and tooltips. These other behaviors are the main point of this mechanism. An /// alternative might be to not affect mouse event handling - but this would allow /// inconsistent UI where clicks and moves interact with elements that are not considered to /// be hovered. @@ -624,7 +625,7 @@ pub enum HitboxBehavior { /// desired, then a `cx.stop_propagation()` handler like the one above can be used. /// /// This has effects beyond event handling - this affects any use of `is_hovered`, such as - /// hover styles and tooltops. These other behaviors are the main point of this mechanism. + /// hover styles and tooltips. These other behaviors are the main point of this mechanism. /// An alternative might be to not affect mouse event handling - but this would allow /// inconsistent UI where clicks and moves interact with elements that are not considered to /// be hovered. 
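Side note on the focus API change above: `FocusHandle::focus` and `Window::focus` now take `&mut App` in addition to the window. A minimal call-site sketch (the helper function is illustrative, not part of gpui):

```rust
use gpui::{App, FocusHandle, Window};

// Illustrative helper: move focus to `handle`, forwarding the app context
// that the new signature requires.
fn focus_now(handle: &FocusHandle, window: &mut Window, cx: &mut App) {
    // Previously this was `handle.focus(window)`.
    handle.focus(window, cx);
}
```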
@@ -909,6 +910,7 @@ struct PendingInput { keystrokes: SmallVec<[Keystroke; 1]>, focus: Option, timer: Option>, + needs_timeout: bool, } pub(crate) struct ElementStateBox { @@ -917,86 +919,69 @@ pub(crate) struct ElementStateBox { pub(crate) type_name: &'static str, } -fn default_bounds(display_id: Option, cx: &mut App) -> Bounds { - #[cfg(target_os = "macos")] - { - const CASCADE_OFFSET: f32 = 25.0; - - let display = display_id - .map(|id| cx.find_display(id)) - .unwrap_or_else(|| cx.primary_display()); - - let display_bounds = display - .as_ref() - .map(|d| d.bounds()) - .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE)); - - // TODO, BUG: if you open a window with the currently active window - // on the stack, this will erroneously select the 'unwrap_or_else' - // code path - let (base_origin, base_size) = cx - .active_window() - .and_then(|w| { - w.update(cx, |_, window, _| { - let bounds = window.bounds(); - (bounds.origin, bounds.size) - }) - .ok() - }) - .unwrap_or_else(|| { - let default_bounds = display - .as_ref() - .map(|d| d.default_bounds()) - .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE)); - (default_bounds.origin, default_bounds.size) - }); - - let cascade_offset = point(px(CASCADE_OFFSET), px(CASCADE_OFFSET)); - let proposed_origin = base_origin + cascade_offset; - let proposed_bounds = Bounds::new(proposed_origin, base_size); - - let display_right = display_bounds.origin.x + display_bounds.size.width; - let display_bottom = display_bounds.origin.y + display_bounds.size.height; - let window_right = proposed_bounds.origin.x + proposed_bounds.size.width; - let window_bottom = proposed_bounds.origin.y + proposed_bounds.size.height; - - let fits_horizontally = window_right <= display_right; - let fits_vertically = window_bottom <= display_bottom; - - let final_origin = match (fits_horizontally, fits_vertically) { - (true, true) => proposed_origin, - (false, true) => point(display_bounds.origin.x, base_origin.y), - (true, false) => point(base_origin.x, display_bounds.origin.y), - (false, false) => display_bounds.origin, - }; - - Bounds::new(final_origin, base_size) - } - - #[cfg(not(target_os = "macos"))] - { - const DEFAULT_WINDOW_OFFSET: Point = point(px(0.), px(35.)); - - // TODO, BUG: if you open a window with the currently active window - // on the stack, this will erroneously select the 'unwrap_or_else' - // code path - cx.active_window() - .and_then(|w| w.update(cx, |_, window, _| window.bounds()).ok()) - .map(|mut bounds| { - bounds.origin += DEFAULT_WINDOW_OFFSET; - bounds - }) - .unwrap_or_else(|| { - let display = display_id - .map(|id| cx.find_display(id)) - .unwrap_or_else(|| cx.primary_display()); - - display - .as_ref() - .map(|display| display.default_bounds()) - .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE)) - }) - } +fn default_bounds(display_id: Option, cx: &mut App) -> WindowBounds { + // TODO, BUG: if you open a window with the currently active window + // on the stack, this will erroneously fallback to `None` + // + // TODO these should be the initial window bounds not considering maximized/fullscreen + let active_window_bounds = cx + .active_window() + .and_then(|w| w.update(cx, |_, window, _| window.window_bounds()).ok()); + + const CASCADE_OFFSET: f32 = 25.0; + + let display = display_id + .map(|id| cx.find_display(id)) + .unwrap_or_else(|| cx.primary_display()); + + let default_placement = || Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE); + + // Use 
visible_bounds to exclude taskbar/dock areas + let display_bounds = display + .as_ref() + .map(|d| d.visible_bounds()) + .unwrap_or_else(default_placement); + + let ( + Bounds { + origin: base_origin, + size: base_size, + }, + window_bounds_ctor, + ): (_, fn(Bounds) -> WindowBounds) = match active_window_bounds { + Some(bounds) => match bounds { + WindowBounds::Windowed(bounds) => (bounds, WindowBounds::Windowed), + WindowBounds::Maximized(bounds) => (bounds, WindowBounds::Maximized), + WindowBounds::Fullscreen(bounds) => (bounds, WindowBounds::Fullscreen), + }, + None => ( + display + .as_ref() + .map(|d| d.default_bounds()) + .unwrap_or_else(default_placement), + WindowBounds::Windowed, + ), + }; + + let cascade_offset = point(px(CASCADE_OFFSET), px(CASCADE_OFFSET)); + let proposed_origin = base_origin + cascade_offset; + let proposed_bounds = Bounds::new(proposed_origin, base_size); + + let display_right = display_bounds.origin.x + display_bounds.size.width; + let display_bottom = display_bounds.origin.y + display_bounds.size.height; + let window_right = proposed_bounds.origin.x + proposed_bounds.size.width; + let window_bottom = proposed_bounds.origin.y + proposed_bounds.size.height; + + let fits_horizontally = window_right <= display_right; + let fits_vertically = window_bottom <= display_bottom; + + let final_origin = match (fits_horizontally, fits_vertically) { + (true, true) => proposed_origin, + (false, true) => point(display_bounds.origin.x, base_origin.y), + (true, false) => point(base_origin.x, display_bounds.origin.y), + (false, false) => display_bounds.origin, + }; + window_bounds_ctor(Bounds::new(final_origin, base_size)) } impl Window { @@ -1023,13 +1008,11 @@ impl Window { tabbing_identifier, } = options; - let bounds = window_bounds - .map(|bounds| bounds.get_bounds()) - .unwrap_or_else(|| default_bounds(display_id, cx)); + let window_bounds = window_bounds.unwrap_or_else(|| default_bounds(display_id, cx)); let mut platform_window = cx.platform.open_window( handle, WindowParams { - bounds, + bounds: window_bounds.get_bounds(), titlebar, kind, is_movable, @@ -1070,12 +1053,10 @@ impl Window { .request_decorations(window_decorations.unwrap_or(WindowDecorations::Server)); platform_window.set_background_appearance(window_background); - if let Some(ref window_open_state) = window_bounds { - match window_open_state { - WindowBounds::Fullscreen(_) => platform_window.toggle_fullscreen(), - WindowBounds::Maximized(_) => platform_window.zoom(), - WindowBounds::Windowed(_) => {} - } + match window_bounds { + WindowBounds::Fullscreen(_) => platform_window.toggle_fullscreen(), + WindowBounds::Maximized(_) => platform_window.zoom(), + WindowBounds::Windowed(_) => {} } platform_window.on_close(Box::new({ @@ -1455,13 +1436,25 @@ impl Window { } /// Move focus to the element associated with the given [`FocusHandle`]. - pub fn focus(&mut self, handle: &FocusHandle) { + pub fn focus(&mut self, handle: &FocusHandle, cx: &mut App) { if !self.focus_enabled || self.focus == Some(handle.id) { return; } self.focus = Some(handle.id); self.clear_pending_keystrokes(); + + // Avoid re-entrant entity updates by deferring observer notifications to the end of the + // current effect cycle, and only for this window. + let window_handle = self.handle; + cx.defer(move |cx| { + window_handle + .update(cx, |_, window, cx| { + window.pending_input_changed(cx); + }) + .ok(); + }); + self.refresh(); } @@ -1482,24 +1475,24 @@ impl Window { } /// Move focus to next tab stop. 
- pub fn focus_next(&mut self) { + pub fn focus_next(&mut self, cx: &mut App) { if !self.focus_enabled { return; } if let Some(handle) = self.rendered_frame.tab_stops.next(self.focus.as_ref()) { - self.focus(&handle) + self.focus(&handle, cx) } } /// Move focus to previous tab stop. - pub fn focus_prev(&mut self) { + pub fn focus_prev(&mut self, cx: &mut App) { if !self.focus_enabled { return; } if let Some(handle) = self.rendered_frame.tab_stops.prev(self.focus.as_ref()) { - self.focus(&handle) + self.focus(&handle, cx) } } @@ -1517,7 +1510,8 @@ impl Window { style } - /// Check if the platform window is maximized + /// Check if the platform window is maximized. + /// /// On some platforms (namely Windows) this is different than the bounds being the size of the display pub fn is_maximized(&self) -> bool { self.platform_window.is_maximized() @@ -1744,6 +1738,27 @@ impl Window { }) } + /// Spawn the future returned by the given closure on the application thread + /// pool, with the given priority. The closure is provided a handle to the + /// current window and an `AsyncWindowContext` for use within your future. + #[track_caller] + pub fn spawn_with_priority( + &self, + priority: Priority, + cx: &App, + f: AsyncFn, + ) -> Task + where + R: 'static, + AsyncFn: AsyncFnOnce(&mut AsyncWindowContext) -> R + 'static, + { + let handle = self.handle; + cx.spawn_with_priority(priority, async move |app| { + let mut async_window_cx = AsyncWindowContext::new_context(app.clone(), handle); + f(&mut async_window_cx).await + }) + } + fn bounds_changed(&mut self, cx: &mut App) { self.scale_factor = self.platform_window.scale_factor(); self.viewport_size = self.platform_window.content_size(); @@ -1819,6 +1834,7 @@ impl Window { self.platform_window.show_window_menu(position) } + /// Handle window movement for Linux and macOS. /// Tells the compositor to take control of window movement (Wayland and X11) /// /// Events may not be received during a move operation. @@ -1957,7 +1973,7 @@ impl Window { } /// Determine whether the given action is available along the dispatch path to the currently focused element. - pub fn is_action_available(&self, action: &dyn Action, cx: &mut App) -> bool { + pub fn is_action_available(&self, action: &dyn Action, cx: &App) -> bool { let node_id = self.focus_node_id_in_rendered_frame(self.focused(cx).map(|handle| handle.id)); self.rendered_frame @@ -1965,6 +1981,14 @@ impl Window { .is_action_available(action, node_id) } + /// Determine whether the given action is available along the dispatch path to the given focus_handle. + pub fn is_action_available_in(&self, action: &dyn Action, focus_handle: &FocusHandle) -> bool { + let node_id = self.focus_node_id_in_rendered_frame(Some(focus_handle.id)); + self.rendered_frame + .dispatch_tree + .is_action_available(action, node_id) + } + /// The position of the mouse relative to the window. pub fn mouse_position(&self) -> Point { self.mouse_position @@ -2004,7 +2028,9 @@ impl Window { if let Some(input_handler) = self.platform_window.take_input_handler() { self.rendered_frame.input_handlers.push(Some(input_handler)); } - self.draw_roots(cx); + if !cx.mode.skip_drawing() { + self.draw_roots(cx); + } self.dirty_views.clear(); self.next_frame.window_active = self.active.get(); @@ -2432,7 +2458,7 @@ impl Window { } /// Updates the cursor style at the platform level. This method should only be called - /// during the prepaint phase of element drawing. + /// during the paint phase of element drawing. 
pub fn set_cursor_style(&mut self, style: CursorStyle, hitbox: &Hitbox) { self.invalidator.debug_assert_paint(); self.next_frame.cursor_styles.push(CursorStyleRequest { @@ -2443,7 +2469,7 @@ impl Window { /// Updates the cursor style for the entire window at the platform level. A cursor /// style using this method will have precedence over any cursor style set using - /// `set_cursor_style`. This method should only be called during the prepaint + /// `set_cursor_style`. This method should only be called during the paint /// phase of element drawing. pub fn set_window_cursor_style(&mut self, style: CursorStyle) { self.invalidator.debug_assert_paint(); @@ -3699,6 +3725,9 @@ impl Window { self.modifiers = mouse_up.modifiers; PlatformInput::MouseUp(mouse_up) } + PlatformInput::MousePressure(mouse_pressure) => { + PlatformInput::MousePressure(mouse_pressure) + } PlatformInput::MouseExited(mouse_exited) => { self.modifiers = mouse_exited.modifiers; PlatformInput::MouseExited(mouse_exited) @@ -3895,32 +3924,52 @@ impl Window { } if !match_result.pending.is_empty() { + currently_pending.timer.take(); currently_pending.keystrokes = match_result.pending; currently_pending.focus = self.focus; - currently_pending.timer = Some(self.spawn(cx, async move |cx| { - cx.background_executor.timer(Duration::from_secs(1)).await; - cx.update(move |window, cx| { - let Some(currently_pending) = window - .pending_input - .take() - .filter(|pending| pending.focus == window.focus) - else { - return; - }; - - let node_id = window.focus_node_id_in_rendered_frame(window.focus); - let dispatch_path = window.rendered_frame.dispatch_tree.dispatch_path(node_id); - let to_replay = window - .rendered_frame - .dispatch_tree - .flush_dispatch(currently_pending.keystrokes, &dispatch_path); + let text_input_requires_timeout = event + .downcast_ref::() + .filter(|key_down| key_down.keystroke.key_char.is_some()) + .and_then(|_| self.platform_window.take_input_handler()) + .map_or(false, |mut input_handler| { + let accepts = input_handler.accepts_text_input(self, cx); + self.platform_window.set_input_handler(input_handler); + accepts + }); - window.pending_input_changed(cx); - window.replay_pending_input(to_replay, cx) - }) - .log_err(); - })); + currently_pending.needs_timeout |= + match_result.pending_has_binding || text_input_requires_timeout; + + if currently_pending.needs_timeout { + currently_pending.timer = Some(self.spawn(cx, async move |cx| { + cx.background_executor.timer(Duration::from_secs(1)).await; + cx.update(move |window, cx| { + let Some(currently_pending) = window + .pending_input + .take() + .filter(|pending| pending.focus == window.focus) + else { + return; + }; + + let node_id = window.focus_node_id_in_rendered_frame(window.focus); + let dispatch_path = + window.rendered_frame.dispatch_tree.dispatch_path(node_id); + + let to_replay = window + .rendered_frame + .dispatch_tree + .flush_dispatch(currently_pending.keystrokes, &dispatch_path); + + window.pending_input_changed(cx); + window.replay_pending_input(to_replay, cx) + }) + .log_err(); + })); + } else { + currently_pending.timer = None; + } self.pending_input = Some(currently_pending); self.pending_input_changed(cx); cx.propagate_event = false; @@ -3983,7 +4032,7 @@ impl Window { self.dispatch_keystroke_observers(event, None, context_stack, cx); } - fn pending_input_changed(&mut self, cx: &mut App) { + pub(crate) fn pending_input_changed(&mut self, cx: &mut App) { self.pending_input_observers .clone() .retain(&(), |callback| callback(self, cx)); @@ -4401,6 
+4450,13 @@ impl Window { dispatch_tree.highest_precedence_binding_for_action(action, &context_stack) } + /// Find the bindings that can follow the current input sequence for the current context stack. + pub fn possible_bindings_for_input(&self, input: &[Keystroke]) -> Vec { + self.rendered_frame + .dispatch_tree + .possible_next_bindings_for_input(input, &self.context_stack()) + } + fn context_stack_for_focus_handle( &self, focus_handle: &FocusHandle, @@ -5060,6 +5116,18 @@ impl From for ElementId { } } +impl From for ElementId { + fn from(name: String) -> Self { + ElementId::Name(name.into()) + } +} + +impl From> for ElementId { + fn from(name: Arc) -> Self { + ElementId::Name(name.into()) + } +} + impl From> for ElementId { fn from(path: Arc) -> Self { ElementId::Path(path) diff --git a/crates/gpui/src/window/prompts.rs b/crates/gpui/src/window/prompts.rs index 63ad1668bec298a6b59d218bf7d4ca7cdce11e8c..980c6f6812405a8fbf4f8c6e24388ab4f967a94c 100644 --- a/crates/gpui/src/window/prompts.rs +++ b/crates/gpui/src/window/prompts.rs @@ -44,10 +44,10 @@ impl PromptHandle { if let Some(sender) = sender.take() { sender.send(e.0).ok(); window_handle - .update(cx, |_, window, _cx| { + .update(cx, |_, window, cx| { window.prompt.take(); if let Some(previous_focus) = &previous_focus { - window.focus(previous_focus); + window.focus(previous_focus, cx); } }) .ok(); @@ -55,7 +55,7 @@ impl PromptHandle { }) .detach(); - window.focus(&view.focus_handle(cx)); + window.focus(&view.focus_handle(cx), cx); RenderablePromptHandle { view: Box::new(view), diff --git a/crates/gpui_macros/src/derive_visual_context.rs b/crates/gpui_macros/src/derive_visual_context.rs index f2681bb29b92f31d31599ebb7201a42a482283d8..b827e753d9678efba01d3fdd77f8e66ea62b6bbd 100644 --- a/crates/gpui_macros/src/derive_visual_context.rs +++ b/crates/gpui_macros/src/derive_visual_context.rs @@ -62,7 +62,7 @@ pub fn derive_visual_context(input: TokenStream) -> TokenStream { V: gpui::Focusable, { let focus_handle = gpui::Focusable::focus_handle(entity, self.#app_variable); - self.#window_variable.focus(&focus_handle) + self.#window_variable.focus(&focus_handle, self.#app_variable) } } }; diff --git a/crates/gpui_macros/tests/derive_inspector_reflection.rs b/crates/gpui_macros/tests/derive_inspector_reflection.rs index a0adcb7801e55d7272191a1e4e831b2c9c6b115c..92f4e56e9c0c48d9c9279e7bf1bb283eeb1139a7 100644 --- a/crates/gpui_macros/tests/derive_inspector_reflection.rs +++ b/crates/gpui_macros/tests/derive_inspector_reflection.rs @@ -1,8 +1,7 @@ //! This code was generated using Zed Agent with Claude Opus 4. -use gpui_macros::derive_inspector_reflection; - -#[derive_inspector_reflection] +// gate on rust-analyzer so rust-analyzer never needs to expand this macro, it takes up to 10 seconds to expand due to inefficiencies in rust-analyzers proc-macro srv +#[cfg_attr(not(rust_analyzer), gpui_macros::derive_inspector_reflection)] trait Transform: Clone { /// Doubles the value fn double(self) -> Self; diff --git a/crates/gpui_tokio/src/gpui_tokio.rs b/crates/gpui_tokio/src/gpui_tokio.rs index 61dcfc48efb1dfecc04c4a131ddc32691e01e255..9cfa1493af49ee95210edb9669a6ca89095f42cd 100644 --- a/crates/gpui_tokio/src/gpui_tokio.rs +++ b/crates/gpui_tokio/src/gpui_tokio.rs @@ -5,25 +5,48 @@ use util::defer; pub use tokio::task::JoinError; +/// Initializes the Tokio wrapper using a new Tokio runtime with 2 worker threads. 
+/// +/// If you need more threads (or access to the runtime outside of GPUI), you can create the runtime +/// yourself and pass a Handle to `init_from_handle`. pub fn init(cx: &mut App) { - cx.set_global(GlobalTokio::new()); + let runtime = tokio::runtime::Builder::new_multi_thread() + // Since we now have two executors, let's try to keep our footprint small + .worker_threads(2) + .enable_all() + .build() + .expect("Failed to initialize Tokio"); + + cx.set_global(GlobalTokio::new(RuntimeHolder::Owned(runtime))); +} + +/// Initializes the Tokio wrapper using a Tokio runtime handle. +pub fn init_from_handle(cx: &mut App, handle: tokio::runtime::Handle) { + cx.set_global(GlobalTokio::new(RuntimeHolder::Shared(handle))); +} + +enum RuntimeHolder { + Owned(tokio::runtime::Runtime), + Shared(tokio::runtime::Handle), +} + +impl RuntimeHolder { + pub fn handle(&self) -> &tokio::runtime::Handle { + match self { + RuntimeHolder::Owned(runtime) => runtime.handle(), + RuntimeHolder::Shared(handle) => handle, + } + } } struct GlobalTokio { - runtime: tokio::runtime::Runtime, + runtime: RuntimeHolder, } impl Global for GlobalTokio {} impl GlobalTokio { - fn new() -> Self { - let runtime = tokio::runtime::Builder::new_multi_thread() - // Since we now have two executors, let's try to keep our footprint small - .worker_threads(2) - .enable_all() - .build() - .expect("Failed to initialize Tokio"); - + fn new(runtime: RuntimeHolder) -> Self { Self { runtime } } } @@ -40,7 +63,7 @@ impl Tokio { R: Send + 'static, { cx.read_global(|tokio: &GlobalTokio, cx| { - let join_handle = tokio.runtime.spawn(f); + let join_handle = tokio.runtime.handle().spawn(f); let abort_handle = join_handle.abort_handle(); let cancel = defer(move || { abort_handle.abort(); @@ -62,7 +85,7 @@ impl Tokio { R: Send + 'static, { cx.read_global(|tokio: &GlobalTokio, cx| { - let join_handle = tokio.runtime.spawn(f); + let join_handle = tokio.runtime.handle().spawn(f); let abort_handle = join_handle.abort_handle(); let cancel = defer(move || { abort_handle.abort(); diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 16600627a77f6a73fa913340f29f5a2da0875de9..177f8639ca1a5d75bd0130979f4d550e3622a1b4 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -28,7 +28,6 @@ http-body.workspace = true http.workspace = true log.workspace = true parking_lot.workspace = true -reqwest.workspace = true serde.workspace = true serde_json.workspace = true serde_urlencoded.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index 6b99a54a7d941c290f2680bc2a599bc63251e24b..8fb49f218568ea36078d772a7225229f31a916c4 100644 --- a/crates/http_client/src/async_body.rs +++ b/crates/http_client/src/async_body.rs @@ -88,17 +88,6 @@ impl From<&'static str> for AsyncBody { } } -impl TryFrom for AsyncBody { - type Error = anyhow::Error; - - fn try_from(value: reqwest::Body) -> Result { - value - .as_bytes() - .ok_or_else(|| anyhow::anyhow!("Underlying data is a stream")) - .map(|bytes| Self::from_bytes(Bytes::copy_from_slice(bytes))) - } -} - impl> From> for AsyncBody { fn from(body: Option) -> Self { match body { diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs index 32efed8e727330d3ac1c2fb6d8ea5d57fdd66dd4..e52e2f1d2555de477cd4597826bc3bd8308faf89 100644 --- a/crates/http_client/src/github.rs +++ b/crates/http_client/src/github.rs @@ -1,10 +1,13 @@ -use crate::HttpClient; +use crate::{HttpClient, HttpRequestExt}; use 
anyhow::{Context as _, Result, anyhow, bail}; use futures::AsyncReadExt; +use http::Request; use serde::Deserialize; use std::sync::Arc; use url::Url; +const GITHUB_API_URL: &str = "https://api.github.com"; + pub struct GitHubLspBinaryVersion { pub name: String, pub url: String, @@ -34,12 +37,17 @@ pub async fn latest_github_release( pre_release: bool, http: Arc, ) -> anyhow::Result { + let url = format!("{GITHUB_API_URL}/repos/{repo_name_with_owner}/releases"); + + let request = Request::get(&url) + .follow_redirects(crate::RedirectPolicy::FollowAll) + .when_some(std::env::var("GITHUB_TOKEN").ok(), |builder, token| { + builder.header("Authorization", format!("Bearer {}", token)) + }) + .body(Default::default())?; + let mut response = http - .get( - format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(), - Default::default(), - true, - ) + .send(request) .await .context("error fetching latest release")?; @@ -91,12 +99,17 @@ pub async fn get_release_by_tag_name( tag: &str, http: Arc, ) -> anyhow::Result { + let url = format!("{GITHUB_API_URL}/repos/{repo_name_with_owner}/releases/tags/{tag}"); + + let request = Request::get(&url) + .follow_redirects(crate::RedirectPolicy::FollowAll) + .when_some(std::env::var("GITHUB_TOKEN").ok(), |builder, token| { + builder.header("Authorization", format!("Bearer {}", token)) + }) + .body(Default::default())?; + let mut response = http - .get( - &format!("https://api.github.com/repos/{repo_name_with_owner}/releases/tags/{tag}"), - Default::default(), - true, - ) + .send(request) .await .context("error fetching latest release")?; diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index a75df61646f31c9dc997bea83acc9d669bf1e29e..1182ef74ca3d59a2d59419e185ff5bd673c5d505 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -8,15 +8,12 @@ use derive_more::Deref; use http::HeaderValue; pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder}; -use futures::{ - FutureExt as _, - future::{self, BoxFuture}, -}; +use futures::future::BoxFuture; use parking_lot::Mutex; use serde::Serialize; +use std::sync::Arc; #[cfg(feature = "test-support")] -use std::fmt; -use std::{any::type_name, sync::Arc}; +use std::{any::type_name, fmt}; pub use url::{Host, Url}; #[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] @@ -59,10 +56,10 @@ impl HttpRequestExt for http::request::Builder { } pub trait HttpClient: 'static + Send + Sync { - fn type_name(&self) -> &'static str; - fn user_agent(&self) -> Option<&HeaderValue>; + fn proxy(&self) -> Option<&Url>; + fn send( &self, req: http::Request, @@ -106,20 +103,10 @@ pub trait HttpClient: 'static + Send + Sync { } } - fn proxy(&self) -> Option<&Url>; - #[cfg(feature = "test-support")] fn as_fake(&self) -> &FakeHttpClient { panic!("called as_fake on {}", type_name::()) } - - fn send_multipart_form<'a>( - &'a self, - _url: &str, - _request: reqwest::multipart::Form, - ) -> BoxFuture<'a, anyhow::Result>> { - future::ready(Err(anyhow!("not implemented"))).boxed() - } } /// An [`HttpClient`] that may have a proxy. 
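For context on the `github.rs` changes above, requests are now built through `HttpRequestExt` and sent via `HttpClient::send`. A hedged, stand-alone sketch of that pattern; the URL and header values are illustrative only:

```rust
use std::sync::Arc;

use anyhow::Result;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, RedirectPolicy, Request};

// Sketch only: build a redirect-following GET and send it through the
// `HttpClient` trait object, mirroring the pattern in `github.rs`.
async fn fetch_releases(http: Arc<dyn HttpClient>, repo: &str) -> Result<()> {
    let url = format!("https://api.github.com/repos/{repo}/releases");
    let request = Request::get(&url)
        .follow_redirects(RedirectPolicy::FollowAll)
        .header("Accept", "application/vnd.github+json")
        .body(AsyncBody::default())?;
    let _response = http.send(request).await?;
    Ok(())
}
```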
@@ -163,38 +150,20 @@ impl HttpClient for HttpClientWithProxy { self.proxy.as_ref() } - fn type_name(&self) -> &'static str { - self.client.type_name() - } - #[cfg(feature = "test-support")] fn as_fake(&self) -> &FakeHttpClient { self.client.as_fake() } - - fn send_multipart_form<'a>( - &'a self, - url: &str, - form: reqwest::multipart::Form, - ) -> BoxFuture<'a, anyhow::Result>> { - self.client.send_multipart_form(url, form) - } } /// An [`HttpClient`] that has a base URL. +#[derive(Deref)] pub struct HttpClientWithUrl { base_url: Mutex, + #[deref] client: HttpClientWithProxy, } -impl std::ops::Deref for HttpClientWithUrl { - type Target = HttpClientWithProxy; - - fn deref(&self) -> &Self::Target { - &self.client - } -} - impl HttpClientWithUrl { /// Returns a new [`HttpClientWithUrl`] with the given base URL. pub fn new( @@ -314,22 +283,10 @@ impl HttpClient for HttpClientWithUrl { self.client.proxy.as_ref() } - fn type_name(&self) -> &'static str { - self.client.type_name() - } - #[cfg(feature = "test-support")] fn as_fake(&self) -> &FakeHttpClient { self.client.as_fake() } - - fn send_multipart_form<'a>( - &'a self, - url: &str, - request: reqwest::multipart::Form, - ) -> BoxFuture<'a, anyhow::Result>> { - self.client.send_multipart_form(url, request) - } } pub fn read_proxy_from_env() -> Option { @@ -384,10 +341,6 @@ impl HttpClient for BlockedHttpClient { None } - fn type_name(&self) -> &'static str { - type_name::() - } - #[cfg(feature = "test-support")] fn as_fake(&self) -> &FakeHttpClient { panic!("called as_fake on {}", type_name::()) @@ -428,6 +381,7 @@ impl FakeHttpClient { } pub fn with_404_response() -> Arc { + log::warn!("Using fake HTTP client with 404 response"); Self::create(|_| async move { Ok(Response::builder() .status(404) @@ -437,6 +391,7 @@ impl FakeHttpClient { } pub fn with_200_response() -> Arc { + log::warn!("Using fake HTTP client with 200 response"); Self::create(|_| async move { Ok(Response::builder() .status(200) @@ -482,10 +437,6 @@ impl HttpClient for FakeHttpClient { None } - fn type_name(&self) -> &'static str { - type_name::() - } - fn as_fake(&self) -> &FakeHttpClient { self } diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index fb45ca1eb5f8334190c11ad811a31128396ba23a..23ae7a6d928d98aafe48d28cfe5626bbf76d29b8 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -34,6 +34,7 @@ pub enum IconName { ArrowRightLeft, ArrowUp, ArrowUpRight, + AtSign, Attach, AudioOff, AudioOn, @@ -44,10 +45,11 @@ pub enum IconName { BellRing, Binary, Blocks, - BoltOutlined, BoltFilled, + BoltOutlined, Book, BookCopy, + Box, CaseSensitive, Chat, Check, @@ -79,13 +81,12 @@ pub enum IconName { Debug, DebugBreakpoint, DebugContinue, + DebugDetach, DebugDisabledBreakpoint, DebugDisabledLogBreakpoint, - DebugDetach, DebugIgnoreBreakpoints, DebugLogBreakpoint, DebugPause, - DebugStepBack, DebugStepInto, DebugStepOut, DebugStepOver, @@ -136,10 +137,12 @@ pub enum IconName { GenericRestore, GitBranch, GitBranchAlt, + GitBranchPlus, Github, Hash, HistoryRerun, Image, + Inception, Indicator, Info, Json, @@ -147,6 +150,7 @@ pub enum IconName { Library, LineHeight, Link, + Linux, ListCollapse, ListFilter, ListTodo, @@ -172,8 +176,8 @@ pub enum IconName { PencilUnavailable, Person, Pin, - PlayOutlined, PlayFilled, + PlayOutlined, Plus, Power, Public, @@ -216,6 +220,7 @@ pub enum IconName { SupermavenError, SupermavenInit, SwatchBook, + SweepAi, Tab, Terminal, TerminalAlt, @@ -255,18 +260,18 @@ pub enum IconName { XCircle, XCircleFilled, ZedAgent, + 
ZedAgentTwo, ZedAssistant, ZedBurnMode, ZedBurnModeOn, - ZedSrcCustom, - ZedSrcExtension, ZedPredict, ZedPredictDisabled, ZedPredictDown, ZedPredictError, ZedPredictUp, + ZedSrcCustom, + ZedSrcExtension, ZedXCopilot, - Linux, } impl IconName { diff --git a/crates/image_viewer/src/image_info.rs b/crates/image_viewer/src/image_info.rs index 6e8956abc67868457f071e04f3c2a1957ff6c19c..6eedb13ed1a150094ae4882718f2384b06cfe6a7 100644 --- a/crates/image_viewer/src/image_info.rs +++ b/crates/image_viewer/src/image_info.rs @@ -77,9 +77,7 @@ impl Render for ImageInfo { .to_string(), ); - div().child( - Button::new("image-metadata", components.join(" • ")).label_size(LabelSize::Small), - ) + div().child(Label::new(components.join(" • ")).size(LabelSize::Small)) } } diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index da99c5b92c1e6ad4d8a3e92ed2e565bcb518e227..9b145e920e48605f19f566ca14a7caf63aff8f0a 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -87,7 +87,7 @@ impl DivInspector { // Rust Analyzer doesn't get started for it. let rust_language_result = languages.language_for_name("Rust").await; let rust_style_buffer = rust_language_result.and_then(|rust_language| { - cx.new(|cx| Buffer::local("", cx).with_language(rust_language, cx)) + cx.new(|cx| Buffer::local("", cx).with_language_async(rust_language, cx)) }); match json_style_buffer.and_then(|json_style_buffer| { @@ -664,6 +664,8 @@ impl CompletionProvider for RustStyleCompletionProvider { replace_range: replace_range.clone(), new_text: format!(".{}()", method.name), label: CodeLabel::plain(method.name.to_string(), None), + match_start: None, + snippet_deduplication_key: None, icon_path: None, documentation: method.documentation.map(|documentation| { CompletionDocumentation::MultiLineMarkdown(documentation.into()) @@ -684,7 +686,6 @@ impl CompletionProvider for RustStyleCompletionProvider { position: language::Anchor, _text: &str, _trigger_in_words: bool, - _menu_is_open: bool, cx: &mut Context, ) -> bool { completion_replace_range(&buffer.read(cx).snapshot(), &position).is_some() diff --git a/crates/inspector_ui/src/inspector.rs b/crates/inspector_ui/src/inspector.rs index 7f7985df9b98ee286c79e18a665802b1f73fbc1e..a82d27b6d015bef97b50983e05f3e2096a1ef8c7 100644 --- a/crates/inspector_ui/src/inspector.rs +++ b/crates/inspector_ui/src/inspector.rs @@ -33,6 +33,7 @@ pub fn init(app_state: Arc, cx: &mut App) { app_state.languages.clone(), app_state.fs.clone(), None, + false, cx, ); diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index 9e73e0da550e806b4a642942766414a4b28249ae..f43949c0051f56559388203e387a540b8c593467 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -159,7 +159,7 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap cx, |s| s.select_ranges([len..len]), ); - if len > 0 { + if len.0 > 0 { editor.insert("\n\n", window, cx); } editor.insert(&entry_heading, window, cx); @@ -173,9 +173,15 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap } fn journal_dir(path: &str) -> Option { - shellexpand::full(path) //TODO handle this better - .ok() - .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal")) + let expanded = shellexpand::full(path).ok()?; + let base_path = Path::new(expanded.as_ref()); + let absolute_path = if base_path.is_absolute() { + base_path.to_path_buf() + } else { + log::warn!("Invalid journal 
path {path:?} (not absolute), falling back to home directory",); + std::env::home_dir()? + }; + Some(absolute_path.join("journal")) } fn heading_entry(now: NaiveTime, hour_format: &HourFormat) -> String { @@ -224,4 +230,65 @@ mod tests { assert_eq!(actual_heading_entry, expected_heading_entry); } } + + mod journal_dir_tests { + use super::super::*; + + #[test] + #[cfg(target_family = "unix")] + fn test_absolute_unix_path() { + let result = journal_dir("/home/user"); + assert!(result.is_some()); + let path = result.unwrap(); + assert!(path.is_absolute()); + assert_eq!(path, PathBuf::from("/home/user/journal")); + } + + #[test] + fn test_tilde_expansion() { + let result = journal_dir("~/documents"); + assert!(result.is_some()); + let path = result.unwrap(); + + assert!(path.is_absolute(), "Tilde should expand to absolute path"); + + if let Some(home) = std::env::home_dir() { + assert_eq!(path, home.join("documents").join("journal")); + } + } + + #[test] + fn test_relative_path_falls_back_to_home() { + for relative_path in ["relative/path", "NONEXT/some/path", "../some/path"] { + let result = journal_dir(relative_path); + assert!(result.is_some(), "Failed for path: {}", relative_path); + let path = result.unwrap(); + + assert!( + path.is_absolute(), + "Path should be absolute for input '{}', got: {:?}", + relative_path, + path + ); + + if let Some(home) = std::env::home_dir() { + assert_eq!( + path, + home.join("journal"), + "Should fall back to home directory for input '{}'", + relative_path + ); + } + } + } + + #[test] + #[cfg(target_os = "windows")] + fn test_absolute_path_windows_style() { + let result = journal_dir("C:\\Users\\user\\Documents"); + assert!(result.is_some()); + let path = result.unwrap(); + assert_eq!(path, PathBuf::from("C:\\Users\\user\\Documents\\journal")); + } + } } diff --git a/crates/json_schema_store/src/json_schema_store.rs b/crates/json_schema_store/src/json_schema_store.rs index b44efb8b1b135850ab78460a428b5088e5fa0928..18041545ccd404eef0035b9b50ff8244d212fa0b 100644 --- a/crates/json_schema_store/src/json_schema_store.rs +++ b/crates/json_schema_store/src/json_schema_store.rs @@ -3,8 +3,9 @@ use std::{str::FromStr, sync::Arc}; use anyhow::{Context as _, Result}; use gpui::{App, AsyncApp, BorrowAppContext as _, Entity, WeakEntity}; -use language::LanguageRegistry; +use language::{LanguageRegistry, language_settings::all_language_settings}; use project::LspStore; +use util::schemars::{AllowTrailingCommas, DefaultDenyUnknownFields}; // Origin: https://github.com/SchemaStore/schemastore const TSCONFIG_SCHEMA: &str = include_str!("schemas/tsconfig.json"); @@ -159,14 +160,35 @@ pub fn resolve_schema_request_inner( } } "snippets" => snippet_provider::format::VsSnippetsFile::generate_json_schema(), + "jsonc" => jsonc_schema(), _ => { - anyhow::bail!("Unrecognized builtin JSON schema: {}", schema_name); + anyhow::bail!("Unrecognized builtin JSON schema: {schema_name}"); } }; Ok(schema) } -pub fn all_schema_file_associations(cx: &mut App) -> serde_json::Value { +const JSONC_LANGUAGE_NAME: &str = "JSONC"; + +pub fn all_schema_file_associations( + languages: &Arc, + cx: &mut App, +) -> serde_json::Value { + let extension_globs = languages + .available_language_for_name(JSONC_LANGUAGE_NAME) + .map(|language| language.matcher().path_suffixes.clone()) + .into_iter() + .flatten() + // Path suffixes can be entire file names or just their extensions. 
+ .flat_map(|path_suffix| [format!("*.{path_suffix}"), path_suffix]); + let override_globs = all_language_settings(None, cx) + .file_types + .get(JSONC_LANGUAGE_NAME) + .into_iter() + .flat_map(|(_, glob_strings)| glob_strings) + .cloned(); + let jsonc_globs = extension_globs.chain(override_globs).collect::>(); + let mut file_associations = serde_json::json!([ { "fileMatch": [ @@ -211,6 +233,10 @@ pub fn all_schema_file_associations(cx: &mut App) -> serde_json::Value { "fileMatch": ["package.json"], "url": "zed://schemas/package_json" }, + { + "fileMatch": &jsonc_globs, + "url": "zed://schemas/jsonc" + }, ]); #[cfg(debug_assertions)] @@ -233,7 +259,7 @@ pub fn all_schema_file_associations(cx: &mut App) -> serde_json::Value { let file_name = normalized_action_name_to_file_name(normalized_name.clone()); serde_json::json!({ "fileMatch": [file_name], - "url": format!("zed://schemas/action/{}", normalized_name) + "url": format!("zed://schemas/action/{normalized_name}") }) }), ); @@ -249,6 +275,26 @@ fn package_json_schema() -> serde_json::Value { serde_json::Value::from_str(PACKAGE_JSON_SCHEMA).unwrap() } +fn jsonc_schema() -> serde_json::Value { + let generator = schemars::generate::SchemaSettings::draft2019_09() + .with_transform(DefaultDenyUnknownFields) + .with_transform(AllowTrailingCommas) + .into_generator(); + let meta_schema = generator + .settings() + .meta_schema + .as_ref() + .expect("meta_schema should be present in schemars settings") + .to_string(); + let defs = generator.definitions(); + let schema = schemars::json_schema!({ + "$schema": meta_schema, + "allowTrailingCommas": true, + "$defs": defs, + }); + serde_json::to_value(schema).unwrap() +} + fn generate_inspector_style_schema() -> serde_json::Value { let schema = schemars::generate::SchemaSettings::draft2019_09() .with_transform(util::schemars::DefaultDenyUnknownFields) diff --git a/crates/json_schema_store/src/schemas/package.json b/crates/json_schema_store/src/schemas/package.json index a24583fa8848891d661114291951d4df28f463fd..0906dcf36e30dcc9bdb64153a963368d3647a6d9 100644 --- a/crates/json_schema_store/src/schemas/package.json +++ b/crates/json_schema_store/src/schemas/package.json @@ -1030,22 +1030,22 @@ "$ref": "#" }, "eslintConfig": { - "$ref": "https://json.schemastore.org/eslintrc.json" + "$ref": "https://www.schemastore.org/eslintrc.json" }, "prettier": { - "$ref": "https://json.schemastore.org/prettierrc.json" + "$ref": "https://www.schemastore.org/prettierrc.json" }, "stylelint": { - "$ref": "https://json.schemastore.org/stylelintrc.json" + "$ref": "https://www.schemastore.org/stylelintrc.json" }, "ava": { - "$ref": "https://json.schemastore.org/ava.json" + "$ref": "https://www.schemastore.org/ava.json" }, "release": { - "$ref": "https://json.schemastore.org/semantic-release.json" + "$ref": "https://www.schemastore.org/semantic-release.json" }, "jscpd": { - "$ref": "https://json.schemastore.org/jscpd.json" + "$ref": "https://www.schemastore.org/jscpd.json" }, "pnpm": { "description": "Defines pnpm specific configuration.", @@ -1305,5 +1305,5 @@ ] } ], - "$id": "https://json.schemastore.org/package.json" + "$id": "https://www.schemastore.org/package.json" } diff --git a/crates/json_schema_store/src/schemas/tsconfig.json b/crates/json_schema_store/src/schemas/tsconfig.json index 4b9088725401e27dfc24c14d7c58acfae4355631..9484c027df59c2efe0d2c4024046fb6a839e78a9 100644 --- a/crates/json_schema_store/src/schemas/tsconfig.json +++ b/crates/json_schema_store/src/schemas/tsconfig.json @@ -1466,7 +1466,7 @@ } } }, - 
"id": "https://json.schemastore.org/tsconfig", + "id": "https://www.schemastore.org/tsconfig", "title": "JSON schema for the TypeScript compiler's configuration file", "type": "object" } diff --git a/crates/keymap_editor/Cargo.toml b/crates/keymap_editor/Cargo.toml index b6086566c3be01b60527d497b836fc53d101e467..33ba95ddd6d8df7efe2f551451af0340d83369c7 100644 --- a/crates/keymap_editor/Cargo.toml +++ b/crates/keymap_editor/Cargo.toml @@ -41,7 +41,6 @@ tree-sitter-rust.workspace = true ui_input.workspace = true ui.workspace = true util.workspace = true -vim.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 3d840de64d67f5bad7646339d66229ff47831028..be20feaf5f8c1feea5b08fa3a6a3b542b26ef5ce 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -81,50 +81,61 @@ pub fn init(cx: &mut App) { let keymap_event_channel = KeymapEventChannel::new(); cx.set_global(keymap_event_channel); - fn common(filter: Option, cx: &mut App) { - workspace::with_active_or_new_workspace(cx, move |workspace, window, cx| { - workspace - .with_local_workspace(window, cx, move |workspace, window, cx| { - let existing = workspace - .active_pane() - .read(cx) - .items() - .find_map(|item| item.downcast::()); - - let keymap_editor = if let Some(existing) = existing { - workspace.activate_item(&existing, true, true, window, cx); - existing - } else { - let keymap_editor = - cx.new(|cx| KeymapEditor::new(workspace.weak_handle(), window, cx)); - workspace.add_item_to_active_pane( - Box::new(keymap_editor.clone()), - None, - true, - window, - cx, - ); - keymap_editor - }; - - if let Some(filter) = filter { - keymap_editor.update(cx, |editor, cx| { - editor.filter_editor.update(cx, |editor, cx| { - editor.clear(window, cx); - editor.insert(&filter, window, cx); - }); - if !editor.has_binding_for(&filter) { - open_binding_modal_after_loading(cx) - } - }) - } - }) - .detach(); - }) + fn open_keymap_editor( + filter: Option, + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + workspace + .with_local_workspace(window, cx, |workspace, window, cx| { + let existing = workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()); + + let keymap_editor = if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + existing + } else { + let keymap_editor = + cx.new(|cx| KeymapEditor::new(workspace.weak_handle(), window, cx)); + workspace.add_item_to_active_pane( + Box::new(keymap_editor.clone()), + None, + true, + window, + cx, + ); + keymap_editor + }; + + if let Some(filter) = filter { + keymap_editor.update(cx, |editor, cx| { + editor.filter_editor.update(cx, |editor, cx| { + editor.clear(window, cx); + editor.insert(&filter, window, cx); + }); + if !editor.has_binding_for(&filter) { + open_binding_modal_after_loading(cx) + } + }) + } + }) + .detach_and_log_err(cx); } - cx.on_action(|_: &OpenKeymap, cx| common(None, cx)); - cx.on_action(|action: &ChangeKeybinding, cx| common(Some(action.action.clone()), cx)); + cx.observe_new(|workspace: &mut Workspace, _window, _cx| { + workspace + .register_action(|workspace, _: &OpenKeymap, window, cx| { + open_keymap_editor(None, workspace, window, cx); + }) + .register_action(|workspace, action: &ChangeKeybinding, window, cx| { + open_keymap_editor(Some(action.action.clone()), workspace, window, cx); + }); + }) + .detach(); 
register_serializable_item::(cx); } @@ -184,7 +195,7 @@ enum SearchMode { impl SearchMode { fn invert(&self) -> Self { match self { - SearchMode::Normal => SearchMode::KeyStroke { exact_match: false }, + SearchMode::Normal => SearchMode::KeyStroke { exact_match: true }, SearchMode::KeyStroke { .. } => SearchMode::Normal, } } @@ -900,7 +911,7 @@ impl KeymapEditor { .focus_handle(cx) .contains_focused(window, cx) { - window.focus(&self.filter_editor.focus_handle(cx)); + window.focus(&self.filter_editor.focus_handle(cx), cx); } else { self.filter_editor.update(cx, |editor, cx| { editor.select_all(&Default::default(), window, cx); @@ -937,7 +948,7 @@ impl KeymapEditor { if let Some(scroll_strategy) = scroll { self.scroll_to_item(index, scroll_strategy, cx); } - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); cx.notify(); } } @@ -958,12 +969,14 @@ impl KeymapEditor { let context_menu = ContextMenu::build(window, cx, |menu, _window, _cx| { menu.context(self.focus_handle.clone()) + .when(selected_binding_is_unbound, |this| { + this.action("Create", Box::new(CreateBinding)) + }) .action_disabled_when( selected_binding_is_unbound, "Edit", Box::new(EditBinding), ) - .action("Create", Box::new(CreateBinding)) .action_disabled_when( selected_binding_is_unbound, "Delete", @@ -985,7 +998,7 @@ impl KeymapEditor { }); let context_menu_handle = context_menu.focus_handle(cx); - window.defer(cx, move |window, _cx| window.focus(&context_menu_handle)); + window.defer(cx, move |window, cx| window.focus(&context_menu_handle, cx)); let subscription = cx.subscribe_in( &context_menu, window, @@ -1001,7 +1014,7 @@ impl KeymapEditor { fn dismiss_context_menu(&mut self, window: &mut Window, cx: &mut Context) { self.context_menu.take(); - window.focus(&self.focus_handle); + window.focus(&self.focus_handle, cx); cx.notify(); } @@ -1217,7 +1230,7 @@ impl KeymapEditor { window, cx, ); - window.focus(&modal.focus_handle(cx)); + window.focus(&modal.focus_handle(cx), cx); modal }); }) @@ -1325,7 +1338,7 @@ impl KeymapEditor { editor.stop_recording(&StopRecording, window, cx); editor.clear_keystrokes(&ClearKeystrokes, window, cx); }); - window.focus(&self.filter_editor.focus_handle(cx)); + window.focus(&self.filter_editor.focus_handle(cx), cx); } } } @@ -1598,9 +1611,33 @@ impl Item for KeymapEditor { impl Render for KeymapEditor { fn render(&mut self, _window: &mut Window, cx: &mut ui::Context) -> impl ui::IntoElement { + if let SearchMode::KeyStroke { exact_match } = self.search_mode { + let button = IconButton::new("keystrokes-exact-match", IconName::CaseSensitive) + .tooltip(move |_window, cx| { + Tooltip::for_action( + "Toggle Exact Match Mode", + &ToggleExactKeystrokeMatching, + cx, + ) + }) + .shape(IconButtonShape::Square) + .toggle_state(exact_match) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action(ToggleExactKeystrokeMatching.boxed_clone(), cx); + })); + + self.keystroke_editor.update(cx, |editor, _| { + editor.actions_slot = Some(button.into_any_element()); + }); + } else { + self.keystroke_editor.update(cx, |editor, _| { + editor.actions_slot = None; + }); + } + let row_count = self.matches.len(); - let theme = cx.theme(); let focus_handle = &self.focus_handle; + let theme = cx.theme(); v_flex() .id("keymap-editor") @@ -1743,7 +1780,7 @@ impl Render for KeymapEditor { ) .action( "Vim Bindings", - vim::OpenDefaultKeymap.boxed_clone(), + zed_actions::vim::OpenDefaultKeymap.boxed_clone(), ) })) }) @@ -1784,49 +1821,14 @@ impl Render for KeymapEditor { ) ), ) - 
.when_some( - match self.search_mode { - SearchMode::Normal => None, - SearchMode::KeyStroke { exact_match } => Some(exact_match), - }, - |this, exact_match| { + .when( + matches!(self.search_mode, SearchMode::KeyStroke { .. }), + |this| { this.child( h_flex() .gap_2() .child(self.keystroke_editor.clone()) - .child( - h_flex() - .min_w_64() - .child( - IconButton::new( - "keystrokes-exact-match", - IconName::CaseSensitive, - ) - .tooltip({ - let keystroke_focus_handle = - self.keystroke_editor.read(cx).focus_handle(cx); - - move |_window, cx| { - Tooltip::for_action_in( - "Toggle Exact Match Mode", - &ToggleExactKeystrokeMatching, - &keystroke_focus_handle, - cx, - ) - } - }) - .shape(IconButtonShape::Square) - .toggle_state(exact_match) - .on_click( - cx.listener(|_, _, window, cx| { - window.dispatch_action( - ToggleExactKeystrokeMatching.boxed_clone(), - cx, - ); - }), - ), - ), - ) + .child(div().min_w_64()), // Spacer div to align with the search input ) }, ), @@ -2696,32 +2698,32 @@ impl KeybindingEditorModalFocusState { .map(|i| i as i32) } - fn focus_index(&self, mut index: i32, window: &mut Window) { + fn focus_index(&self, mut index: i32, window: &mut Window, cx: &mut App) { if index < 0 { index = self.handles.len() as i32 - 1; } if index >= self.handles.len() as i32 { index = 0; } - window.focus(&self.handles[index as usize]); + window.focus(&self.handles[index as usize], cx); } - fn focus_next(&self, window: &mut Window, cx: &App) { + fn focus_next(&self, window: &mut Window, cx: &mut App) { let index_to_focus = if let Some(index) = self.focused_index(window, cx) { index + 1 } else { 0 }; - self.focus_index(index_to_focus, window); + self.focus_index(index_to_focus, window, cx); } - fn focus_previous(&self, window: &mut Window, cx: &App) { + fn focus_previous(&self, window: &mut Window, cx: &mut App) { let index_to_focus = if let Some(index) = self.focused_index(window, cx) { index - 1 } else { self.handles.len() as i32 - 1 }; - self.focus_index(index_to_focus, window); + self.focus_index(index_to_focus, window, cx); } } @@ -2755,7 +2757,7 @@ impl ActionArgumentsEditor { ) -> Self { let focus_handle = cx.focus_handle(); cx.on_focus_in(&focus_handle, window, |this, window, cx| { - this.editor.focus_handle(cx).focus(window); + this.editor.focus_handle(cx).focus(window, cx); }) .detach(); let editor = cx.new(|cx| { @@ -2808,7 +2810,7 @@ impl ActionArgumentsEditor { this.update_in(cx, |this, window, cx| { if this.editor.focus_handle(cx).is_focused(window) { - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); } this.editor = editor; this.backup_temp_dir = backup_temp_dir; @@ -2993,6 +2995,8 @@ impl CompletionProvider for KeyContextCompletionProvider { documentation: None, source: project::CompletionSource::Custom, icon_path: None, + match_start: None, + snippet_deduplication_key: None, insert_text_mode: None, confirm: None, }) @@ -3008,7 +3012,6 @@ impl CompletionProvider for KeyContextCompletionProvider { _position: language::Anchor, text: &str, _trigger_in_words: bool, - _menu_is_open: bool, _cx: &mut Context, ) -> bool { text.chars() diff --git a/crates/keymap_editor/src/ui_components/keystroke_input.rs b/crates/keymap_editor/src/ui_components/keystroke_input.rs index 5f85e5124f84dc6fc9a9f3ab95e72f15dc5fefeb..496a8ae7e6359bc169845542a0f05800008a4786 100644 --- a/crates/keymap_editor/src/ui_components/keystroke_input.rs +++ b/crates/keymap_editor/src/ui_components/keystroke_input.rs @@ -64,6 +64,7 @@ pub struct KeystrokeInput { 
clear_close_keystrokes_timer: Option>, #[cfg(test)] recording: bool, + pub actions_slot: Option, } impl KeystrokeInput { @@ -94,6 +95,7 @@ impl KeystrokeInput { clear_close_keystrokes_timer: None, #[cfg(test)] recording: false, + actions_slot: None, } } @@ -386,7 +388,7 @@ impl KeystrokeInput { window: &mut Window, cx: &mut Context, ) { - window.focus(&self.inner_focus_handle); + window.focus(&self.inner_focus_handle, cx); self.clear_keystrokes(&ClearKeystrokes, window, cx); self.previous_modifiers = window.modifiers(); #[cfg(test)] @@ -405,7 +407,7 @@ impl KeystrokeInput { if !self.is_recording(window) { return; } - window.focus(&self.outer_focus_handle); + window.focus(&self.outer_focus_handle, cx); if let Some(close_keystrokes_start) = self.close_keystrokes_start.take() && close_keystrokes_start < self.keystrokes.len() { @@ -445,6 +447,11 @@ impl KeystrokeInput { // not get de-synced self.inner_focus_handle.is_focused(window) } + + pub fn actions_slot(mut self, action: impl IntoElement) -> Self { + self.actions_slot = Some(action.into_any_element()); + self + } } impl EventEmitter<()> for KeystrokeInput {} @@ -586,7 +593,7 @@ impl Render for KeystrokeInput { .min_w_0() .justify_center() .flex_wrap() - .gap(ui::DynamicSpacing::Base04.rems(cx)) + .gap_1() .children(self.render_keystrokes(is_recording)), ) .child( @@ -636,18 +643,25 @@ impl Render for KeystrokeInput { ) } }) - .child( - IconButton::new("clear-btn", IconName::Backspace) - .shape(IconButtonShape::Square) - .tooltip(Tooltip::for_action_title( - "Clear Keystrokes", - &ClearKeystrokes, - )) - .when(!is_focused, |this| this.icon_color(Color::Muted)) - .on_click(cx.listener(|this, _event, window, cx| { - this.clear_keystrokes(&ClearKeystrokes, window, cx); - })), - ), + .when_some(self.actions_slot.take(), |this, action| this.child(action)) + .when(is_recording, |this| { + this.child( + IconButton::new("clear-btn", IconName::Backspace) + .shape(IconButtonShape::Square) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Clear Keystrokes", + Some(&ClearKeystrokes), + "Hit it three times to execute", + cx, + ) + }) + .when(!is_focused, |this| this.icon_color(Color::Muted)) + .on_click(cx.listener(|this, _event, window, cx| { + this.clear_keystrokes(&ClearKeystrokes, window, cx); + })), + ) + }), ) } } diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index ffc5ad85d14c293eeeaff9172b21ef58cf9a1cf0..06d41e729bfabbf4f7e050409d2675dd909941d6 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -21,6 +21,7 @@ test-support = [ "tree-sitter-rust", "tree-sitter-python", "tree-sitter-typescript", + "tree-sitter-md", "settings/test-support", "util/test-support", ] @@ -31,6 +32,7 @@ async-trait.workspace = true clock.workspace = true collections.workspace = true ec4rs.workspace = true +encoding_rs.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true @@ -47,6 +49,7 @@ rand = { workspace = true, optional = true } regex.workspace = true rpc.workspace = true schemars.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true @@ -59,6 +62,7 @@ sum_tree.workspace = true task.workspace = true text.workspace = true theme.workspace = true +tree-sitter-md = { workspace = true, optional = true } tree-sitter-python = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } diff --git a/crates/language/src/buffer.rs 
b/crates/language/src/buffer.rs index ea2405d04c32cba45963bc32747ee0b94292ffd9..abf4d9b10a761b9c0247145e8ddb0664127756d2 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,15 +1,19 @@ +pub mod row_chunk; + use crate::{ - DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, - TextObject, TreeSitterOptions, + DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture, + RunnableTag, TextObject, TreeSitterOptions, diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup}, language_settings::{LanguageSettings, language_settings}, outline::OutlineItem, + row_chunk::RowChunks, syntax_map::{ - SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch, - SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint, + MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, + SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint, }, task_context::RunnableRange, text_diff::text_diff, + unified_diff, }; pub use crate::{ Grammar, Language, LanguageRegistry, @@ -20,7 +24,8 @@ pub use crate::{ use anyhow::{Context as _, Result}; use clock::Lamport; pub use clock::ReplicaId; -use collections::HashMap; +use collections::{HashMap, HashSet}; +use encoding_rs::Encoding; use fs::MTime; use futures::channel::oneshot; use gpui::{ @@ -126,6 +131,39 @@ pub struct Buffer { has_unsaved_edits: Cell<(clock::Global, bool)>, change_bits: Vec>>, _subscriptions: Vec, + tree_sitter_data: Arc, + encoding: &'static Encoding, + has_bom: bool, +} + +#[derive(Debug)] +pub struct TreeSitterData { + chunks: RowChunks, + brackets_by_chunks: Mutex>>>>, +} + +const MAX_ROWS_IN_A_CHUNK: u32 = 50; + +impl TreeSitterData { + fn clear(&mut self, snapshot: text::BufferSnapshot) { + self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK); + self.brackets_by_chunks.get_mut().clear(); + self.brackets_by_chunks + .get_mut() + .resize(self.chunks.len(), None); + } + + fn new(snapshot: text::BufferSnapshot) -> Self { + let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK); + Self { + brackets_by_chunks: Mutex::new(vec![None; chunks.len()]), + chunks, + } + } + + fn version(&self) -> &clock::Global { + self.chunks.version() + } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -149,6 +187,7 @@ pub struct BufferSnapshot { remote_selections: TreeMap, language: Option>, non_text_state_update_count: usize, + tree_sitter_data: Arc, } /// The kind and amount of indentation in a particular line. For now, @@ -209,6 +248,8 @@ struct SelectionSet { pub struct Diagnostic { /// The name of the service that produced this diagnostic. pub source: Option, + /// The ID provided by the dynamic registration that produced this diagnostic. + pub registration_id: Option, /// A machine-readable code that identifies this diagnostic. pub code: Option, pub code_description: Option, @@ -323,7 +364,8 @@ pub enum BufferEvent { /// The buffer is in need of a reload ReloadNeeded, /// The buffer's language was changed. - LanguageChanged, + /// The boolean indicates whether this buffer did not have a language before, but does now. + LanguageChanged(bool), /// The buffer's syntax trees were updated. Reparsed, /// The buffer's diagnostics were updated. 
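`BufferEvent::LanguageChanged` now carries a `bool` that is true when the buffer previously had no language. A minimal handling sketch (the callback name is hypothetical):

```rust
use language::BufferEvent;

// Sketch only: the variant's new payload distinguishes "a language was just
// assigned for the first time" from "the language was swapped".
fn handle_buffer_event(event: &BufferEvent) {
    if let BufferEvent::LanguageChanged(newly_assigned) = event {
        if *newly_assigned {
            // Hypothetical hook, e.g. prime language-specific state.
            on_language_first_assigned();
        }
    }
}

fn on_language_first_assigned() {}
```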
@@ -717,6 +759,33 @@ pub struct EditPreview { } impl EditPreview { + pub fn as_unified_diff(&self, edits: &[(Range, impl AsRef)]) -> Option { + let (first, _) = edits.first()?; + let (last, _) = edits.last()?; + + let start = first.start.to_point(&self.old_snapshot); + let old_end = last.end.to_point(&self.old_snapshot); + let new_end = last + .end + .bias_right(&self.old_snapshot) + .to_point(&self.applied_edits_snapshot); + + let start = Point::new(start.row.saturating_sub(3), 0); + let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point()); + let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point()); + + Some(unified_diff( + &self + .old_snapshot + .text_for_range(start..old_end) + .collect::(), + &self + .applied_edits_snapshot + .text_for_range(start..new_end) + .collect::(), + )) + } + pub fn highlight_edits( &self, current_snapshot: &BufferSnapshot, @@ -730,6 +799,8 @@ impl EditPreview { let mut highlighted_text = HighlightedTextBuilder::default(); + let visible_range_in_preview_snapshot = + visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot); let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start; let insertion_highlight_style = HighlightStyle { @@ -797,7 +868,19 @@ impl EditPreview { highlighted_text.build() } - fn compute_visible_range(&self, edits: &[(Range, T)]) -> Option> { + pub fn build_result_buffer(&self, cx: &mut App) -> Entity { + cx.new(|cx| { + let mut buffer = Buffer::local_normalized( + self.applied_edits_snapshot.as_rope().clone(), + self.applied_edits_snapshot.line_ending(), + cx, + ); + buffer.set_language_async(self.syntax_snapshot.root_language(), cx); + buffer + }) + } + + pub fn compute_visible_range(&self, edits: &[(Range, T)]) -> Option> { let (first, _) = edits.first()?; let (last, _) = edits.last()?; @@ -814,15 +897,23 @@ impl EditPreview { let range = Point::new(start.row, 0) ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row)); - Some(range.to_offset(&self.applied_edits_snapshot)) + Some(range) } } #[derive(Clone, Debug, PartialEq, Eq)] -pub struct BracketMatch { - pub open_range: Range, - pub close_range: Range, +pub struct BracketMatch { + pub open_range: Range, + pub close_range: Range, pub newline_only: bool, + pub syntax_layer_depth: usize, + pub color_index: Option, +} + +impl BracketMatch { + pub fn bracket_ranges(self) -> (Range, Range) { + (self.open_range, self.close_range) + } } impl Buffer { @@ -953,6 +1044,12 @@ impl Buffer { } /// Assign a language to the buffer, returning the buffer. + pub fn with_language_async(mut self, language: Arc, cx: &mut Context) -> Self { + self.set_language_async(Some(language), cx); + self + } + + /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer. 
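// --- editor's sketch (illustrative, not part of the diff) ---
// The row-window arithmetic behind `EditPreview::as_unified_diff` in this hunk,
// reduced to plain integers: the diffed region starts three rows before the first
// edit and extends a few rows past the last one, clamped to the buffer bounds.
// This is a hedged restatement, not the real buffer-point implementation.
fn context_window(first_edit_row: u32, last_edit_row: u32, max_row: u32) -> (u32, u32) {
    let start = first_edit_row.saturating_sub(3);
    let end = (last_edit_row + 4).min(max_row);
    (start, end)
}

fn main() {
    // Edits touching rows 1..=2 of a large buffer are diffed with rows 0 through 6.
    assert_eq!(context_window(1, 2, 100), (0, 6));
    // Near the buffer edges the window saturates/clamps instead of over- or underflowing.
    assert_eq!(context_window(0, 99, 100), (0, 100));
}
// --- end of sketch ---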
pub fn with_language(mut self, language: Arc, cx: &mut Context) -> Self { self.set_language(Some(language), cx); self @@ -973,8 +1070,10 @@ impl Buffer { let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime()); let snapshot = buffer.snapshot(); let syntax_map = Mutex::new(SyntaxMap::new(&snapshot)); + let tree_sitter_data = TreeSitterData::new(snapshot); Self { saved_mtime, + tree_sitter_data: Arc::new(tree_sitter_data), saved_version: buffer.version(), preview_version: buffer.version(), reload_task: None, @@ -1004,6 +1103,8 @@ impl Buffer { has_conflict: false, change_bits: Default::default(), _subscriptions: Vec::new(), + encoding: encoding_rs::UTF_8, + has_bom: false, } } @@ -1024,12 +1125,14 @@ impl Buffer { let language_registry = language_registry.clone(); syntax.reparse(&text, language_registry, language); } + let tree_sitter_data = TreeSitterData::new(text.clone()); BufferSnapshot { text, syntax, file: None, diagnostics: Default::default(), remote_selections: Default::default(), + tree_sitter_data: Arc::new(tree_sitter_data), language, non_text_state_update_count: 0, } @@ -1047,9 +1150,11 @@ impl Buffer { ) .snapshot(); let syntax = SyntaxMap::new(&text).snapshot(); + let tree_sitter_data = TreeSitterData::new(text.clone()); BufferSnapshot { text, syntax, + tree_sitter_data: Arc::new(tree_sitter_data), file: None, diagnostics: Default::default(), remote_selections: Default::default(), @@ -1074,9 +1179,11 @@ impl Buffer { if let Some(language) = language.clone() { syntax.reparse(&text, language_registry, language); } + let tree_sitter_data = TreeSitterData::new(text.clone()); BufferSnapshot { text, syntax, + tree_sitter_data: Arc::new(tree_sitter_data), file: None, diagnostics: Default::default(), remote_selections: Default::default(), @@ -1093,9 +1200,16 @@ impl Buffer { syntax_map.interpolate(&text); let syntax = syntax_map.snapshot(); + let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() { + Arc::new(TreeSitterData::new(text.clone())) + } else { + self.tree_sitter_data.clone() + }; + BufferSnapshot { text, syntax, + tree_sitter_data, file: self.file.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), @@ -1123,7 +1237,7 @@ impl Buffer { } // Reparse the branch buffer so that we get syntax highlighting immediately. - branch.reparse(cx); + branch.reparse(cx, true); branch }) @@ -1274,14 +1388,50 @@ impl Buffer { self.saved_mtime } + /// Returns the character encoding of the buffer's file. + pub fn encoding(&self) -> &'static Encoding { + self.encoding + } + + /// Sets the character encoding of the buffer. + pub fn set_encoding(&mut self, encoding: &'static Encoding) { + self.encoding = encoding; + } + + /// Returns whether the buffer has a Byte Order Mark. + pub fn has_bom(&self) -> bool { + self.has_bom + } + + /// Sets whether the buffer has a Byte Order Mark. + pub fn set_has_bom(&mut self, has_bom: bool) { + self.has_bom = has_bom; + } + /// Assign a language to the buffer. + pub fn set_language_async(&mut self, language: Option>, cx: &mut Context) { + self.set_language_(language, cfg!(any(test, feature = "test-support")), cx); + } + + /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer. 
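// --- editor's sketch (illustrative, not part of the diff) ---
// The new `encoding`/`has_bom` fields let a buffer remember how its file was
// decoded so it can be written back the same way. This standalone helper only
// shows the kind of byte-order-mark bookkeeping involved (UTF-8 BOM only); the
// real code tracks a full `encoding_rs::Encoding`.
const UTF8_BOM: [u8; 3] = [0xEF, 0xBB, 0xBF];

fn strip_utf8_bom(bytes: &[u8]) -> (&[u8], bool) {
    match bytes.strip_prefix(&UTF8_BOM) {
        Some(rest) => (rest, true),
        None => (bytes, false),
    }
}

fn main() {
    let (text, has_bom) = strip_utf8_bom(b"\xEF\xBB\xBFhello");
    assert!(has_bom);
    assert_eq!(text, &b"hello"[..]);
}
// --- end of sketch ---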
pub fn set_language(&mut self, language: Option>, cx: &mut Context) { + self.set_language_(language, true, cx); + } + + fn set_language_( + &mut self, + language: Option>, + may_block: bool, + cx: &mut Context, + ) { self.non_text_state_update_count += 1; self.syntax_map.lock().clear(&self.text); - self.language = language; + let old_language = std::mem::replace(&mut self.language, language); self.was_changed(); - self.reparse(cx); - cx.emit(BufferEvent::LanguageChanged); + self.reparse(cx, may_block); + let has_fresh_language = + self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT); + cx.emit(BufferEvent::LanguageChanged(has_fresh_language)); } /// Assign a language registry to the buffer. This allows the buffer to retrieve @@ -1513,6 +1663,16 @@ impl Buffer { self.sync_parse_timeout = timeout; } + fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) { + match Arc::get_mut(&mut self.tree_sitter_data) { + Some(tree_sitter_data) => tree_sitter_data.clear(snapshot), + None => { + let tree_sitter_data = TreeSitterData::new(snapshot); + self.tree_sitter_data = Arc::new(tree_sitter_data) + } + } + } + /// Called after an edit to synchronize the buffer's main parse tree with /// the buffer's new underlying state. /// @@ -1523,9 +1683,9 @@ impl Buffer { /// The snapshot with the interpolated edits is sent to a background thread, /// where we ask Tree-sitter to perform an incremental parse. /// - /// Meanwhile, in the foreground, we block the main thread for up to 1ms - /// waiting on the parse to complete. As soon as it completes, we proceed - /// synchronously, unless a 1ms timeout elapses. + /// Meanwhile, in the foreground if `may_block` is true, we block the main + /// thread for up to 1ms waiting on the parse to complete. As soon as it + /// completes, we proceed synchronously, unless a 1ms timeout elapses. /// /// If we time out waiting on the parse, we spawn a second task waiting /// until the parse does complete and return with the interpolated tree still @@ -1536,7 +1696,10 @@ impl Buffer { /// initiate an additional reparse recursively. To avoid concurrent parses /// for the same buffer, we only initiate a new parse if we are not already /// parsing in the background. 
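// --- editor's sketch (illustrative, not part of the diff) ---
// The `may_block` flow described above, mimicked with std primitives: wait
// briefly for a background parse, and if the deadline passes (or blocking is
// not allowed), let the result be applied asynchronously instead. The real code
// uses gpui's executor, not threads and channels.
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

fn main() {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(5)); // stand-in for the tree-sitter parse
        tx.send("parsed").ok();
    });

    // `may_block == true`: give the parse ~1ms to finish synchronously.
    match rx.recv_timeout(Duration::from_millis(1)) {
        Ok(result) => println!("finished synchronously: {result}"),
        Err(mpsc::RecvTimeoutError::Timeout) => {
            // Timed out: keep the stale tree for now and apply the fresh parse
            // whenever it arrives, as the async branch does.
            let result = rx.recv().expect("parse task dropped");
            println!("finished asynchronously: {result}");
        }
        Err(err) => panic!("{err}"),
    }
}
// --- end of sketch ---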
- pub fn reparse(&mut self, cx: &mut Context) { + pub fn reparse(&mut self, cx: &mut Context, may_block: bool) { + if self.text.version() != *self.tree_sitter_data.version() { + self.invalidate_tree_sitter_data(self.text.snapshot()); + } if self.reparse.is_some() { return; } @@ -1565,42 +1728,70 @@ impl Buffer { }); self.parse_status.0.send(ParseStatus::Parsing).unwrap(); - match cx - .background_executor() - .block_with_timeout(self.sync_parse_timeout, parse_task) - { - Ok(new_syntax_snapshot) => { - self.did_finish_parsing(new_syntax_snapshot, cx); - self.reparse = None; - } - Err(parse_task) => { - // todo(lw): hot foreground spawn - self.reparse = Some(cx.spawn(async move |this, cx| { - let new_syntax_map = cx.background_spawn(parse_task).await; - this.update(cx, move |this, cx| { - let grammar_changed = || { - this.language.as_ref().is_none_or(|current_language| { - !Arc::ptr_eq(&language, current_language) - }) - }; - let language_registry_changed = || { - new_syntax_map.contains_unknown_injections() - && language_registry.is_some_and(|registry| { - registry.version() != new_syntax_map.language_registry_version() + if may_block { + match cx + .background_executor() + .block_with_timeout(self.sync_parse_timeout, parse_task) + { + Ok(new_syntax_snapshot) => { + self.did_finish_parsing(new_syntax_snapshot, cx); + self.reparse = None; + } + Err(parse_task) => { + self.reparse = Some(cx.spawn(async move |this, cx| { + let new_syntax_map = cx.background_spawn(parse_task).await; + this.update(cx, move |this, cx| { + let grammar_changed = || { + this.language.as_ref().is_none_or(|current_language| { + !Arc::ptr_eq(&language, current_language) }) - }; - let parse_again = this.version.changed_since(&parsed_version) - || language_registry_changed() - || grammar_changed(); - this.did_finish_parsing(new_syntax_map, cx); - this.reparse = None; - if parse_again { - this.reparse(cx); - } - }) - .ok(); - })); + }; + let language_registry_changed = || { + new_syntax_map.contains_unknown_injections() + && language_registry.is_some_and(|registry| { + registry.version() + != new_syntax_map.language_registry_version() + }) + }; + let parse_again = this.version.changed_since(&parsed_version) + || language_registry_changed() + || grammar_changed(); + this.did_finish_parsing(new_syntax_map, cx); + this.reparse = None; + if parse_again { + this.reparse(cx, false); + } + }) + .ok(); + })); + } } + } else { + self.reparse = Some(cx.spawn(async move |this, cx| { + let new_syntax_map = cx.background_spawn(parse_task).await; + this.update(cx, move |this, cx| { + let grammar_changed = || { + this.language.as_ref().is_none_or(|current_language| { + !Arc::ptr_eq(&language, current_language) + }) + }; + let language_registry_changed = || { + new_syntax_map.contains_unknown_injections() + && language_registry.is_some_and(|registry| { + registry.version() != new_syntax_map.language_registry_version() + }) + }; + let parse_again = this.version.changed_since(&parsed_version) + || language_registry_changed() + || grammar_changed(); + this.did_finish_parsing(new_syntax_map, cx); + this.reparse = None; + if parse_again { + this.reparse(cx, false); + } + }) + .ok(); + })); } } @@ -1610,6 +1801,9 @@ impl Buffer { self.syntax_map.lock().did_parse(syntax_snapshot); self.request_autoindent(cx); self.parse_status.0.send(ParseStatus::Idle).unwrap(); + if self.text.version() != *self.tree_sitter_data.version() { + self.invalidate_tree_sitter_data(self.text.snapshot()); + } cx.emit(BufferEvent::Reparsed); cx.notify(); } @@ -2055,6 
+2249,11 @@ impl Buffer { } } + /// Marks the buffer as having a conflict regardless of current buffer state. + pub fn set_conflict(&mut self) { + self.has_conflict = true; + } + /// Checks if the buffer and its file have both changed since the buffer /// was last saved or reloaded. pub fn has_conflict(&self) -> bool { @@ -2077,7 +2276,7 @@ impl Buffer { } /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text. - pub fn subscribe(&mut self) -> Subscription { + pub fn subscribe(&mut self) -> Subscription { self.text.subscribe() } @@ -2495,7 +2694,7 @@ impl Buffer { return; } - self.reparse(cx); + self.reparse(cx, true); cx.emit(BufferEvent::Edited); if was_dirty != self.is_dirty() { cx.emit(BufferEvent::DirtyChanged); @@ -3042,15 +3241,22 @@ impl BufferSnapshot { struct StartPosition { start: Point, suffix: SharedString, + language: Arc, } // Find the suggested indentation ranges based on the syntax tree. let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0); let end = Point::new(row_range.end, 0); let range = (start..end).to_offset(&self.text); - let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| { - Some(&grammar.indents_config.as_ref()?.query) - }); + let mut matches = self.syntax.matches_with_options( + range.clone(), + &self.text, + TreeSitterOptions { + max_bytes_to_query: Some(MAX_BYTES_TO_QUERY), + max_start_depth: None, + }, + |grammar| Some(&grammar.indents_config.as_ref()?.query), + ); let indent_configs = matches .grammars() .iter() @@ -3079,6 +3285,7 @@ impl BufferSnapshot { start_positions.push(StartPosition { start: Point::from_ts_point(capture.node.start_position()), suffix: suffix.clone(), + language: mat.language.clone(), }); } } @@ -3129,8 +3336,7 @@ impl BufferSnapshot { // set its end to the outdent position if let Some(range_to_truncate) = indent_ranges .iter_mut() - .filter(|indent_range| indent_range.contains(&outdent_position)) - .next_back() + .rfind(|indent_range| indent_range.contains(&outdent_position)) { range_to_truncate.end = outdent_position; } @@ -3140,7 +3346,7 @@ impl BufferSnapshot { // Find the suggested indentation increases and decreased based on regexes. 
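// --- editor's sketch (illustrative, not part of the diff) ---
// The regex-based indent pass below now looks up the language *at each row*
// instead of using one config for the whole buffer, so embedded languages get
// their own increase/decrease patterns. A hedged, standalone restatement of
// that lookup, with a map standing in for `language_at`:
use std::collections::HashMap;

struct IndentConfig {
    increase: &'static str, // stands in for a compiled increase_indent_pattern
    decrease: &'static str, // stands in for a compiled decrease_indent_pattern
}

fn config_for_row<'a>(
    language_at_row: &'a HashMap<u32, IndentConfig>,
    root: &'a IndentConfig,
    row: u32,
) -> &'a IndentConfig {
    // Fall back to the buffer's root language when no embedded language covers the row.
    language_at_row.get(&row).unwrap_or(root)
}

fn main() {
    let root = IndentConfig { increase: "{", decrease: "}" };
    let mut embedded = HashMap::new();
    // Hypothetical: row 3 sits inside an embedded block with different rules.
    embedded.insert(3, IndentConfig { increase: ":", decrease: "" });

    assert_eq!(config_for_row(&embedded, &root, 0).increase, "{");
    assert_eq!(config_for_row(&embedded, &root, 3).increase, ":");
}
// --- end of sketch ---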
let mut regex_outdent_map = HashMap::default(); - let mut last_seen_suffix: HashMap> = HashMap::default(); + let mut last_seen_suffix: HashMap> = HashMap::default(); let mut start_positions_iter = start_positions.iter().peekable(); let mut indent_change_rows = Vec::<(u32, Ordering)>::new(); @@ -3148,14 +3354,21 @@ impl BufferSnapshot { Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0) ..Point::new(row_range.end, 0), |row, line| { - if config + let indent_len = self.indent_size_for_line(row).len; + let row_language = self.language_at(Point::new(row, indent_len)).cloned(); + let row_language_config = row_language + .as_ref() + .map(|lang| lang.config()) + .unwrap_or(config); + + if row_language_config .decrease_indent_pattern .as_ref() .is_some_and(|regex| regex.is_match(line)) { indent_change_rows.push((row, Ordering::Less)); } - if config + if row_language_config .increase_indent_pattern .as_ref() .is_some_and(|regex| regex.is_match(line)) @@ -3164,16 +3377,16 @@ impl BufferSnapshot { } while let Some(pos) = start_positions_iter.peek() { if pos.start.row < row { - let pos = start_positions_iter.next().unwrap(); + let pos = start_positions_iter.next().unwrap().clone(); last_seen_suffix .entry(pos.suffix.to_string()) .or_default() - .push(pos.start); + .push(pos); } else { break; } } - for rule in &config.decrease_indent_patterns { + for rule in &row_language_config.decrease_indent_patterns { if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) { let row_start_column = self.indent_size_for_line(row).len; let basis_row = rule @@ -3181,10 +3394,16 @@ impl BufferSnapshot { .iter() .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix)) .flatten() - .filter(|start_point| start_point.column <= row_start_column) - .max_by_key(|start_point| start_point.row); - if let Some(outdent_to_row) = basis_row { - regex_outdent_map.insert(row, outdent_to_row.row); + .filter(|pos| { + row_language + .as_ref() + .or(self.language.as_ref()) + .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language)) + }) + .filter(|pos| pos.start.column <= row_start_column) + .max_by_key(|pos| pos.start.row); + if let Some(outdent_to) = basis_row { + regex_outdent_map.insert(row, outdent_to.start.row); } break; } @@ -3880,6 +4099,20 @@ impl BufferSnapshot { }) } + pub fn outline_items_as_offsets_containing( + &self, + range: Range, + include_extra_context: bool, + theme: Option<&SyntaxTheme>, + ) -> Vec> { + self.outline_items_containing_internal( + range, + include_extra_context, + theme, + |buffer, range| range.to_offset(buffer), + ) + } + fn outline_items_containing_internal( &self, range: Range, @@ -4114,24 +4347,60 @@ impl BufferSnapshot { self.syntax.matches(range, self, query) } - pub fn all_bracket_ranges( + /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks. + /// Hence, may return more bracket pairs than the range contains. + /// + /// Will omit known chunks. + /// The resulting bracket match collections are not ordered. 
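// --- editor's sketch (illustrative, not part of the diff) ---
// The caching shape of `fetch_bracket_ranges` below, with strings standing in
// for bracket matches: results are computed per row chunk, memoized by chunk id,
// and chunks the caller already knows about are skipped entirely. Hedged and
// standalone; the real code keys the cache by `RowChunk` and guards it with a Mutex.
use std::collections::{HashMap, HashSet};
use std::ops::Range;

fn fetch_per_chunk(
    chunks: &[Range<u32>],
    cache: &mut Vec<Option<Vec<&'static str>>>,
    known: &HashSet<Range<u32>>,
) -> HashMap<Range<u32>, Vec<&'static str>> {
    let mut out = HashMap::new();
    for (id, chunk) in chunks.iter().enumerate() {
        if known.contains(chunk) {
            continue; // the caller already has up-to-date data for this chunk
        }
        let brackets = cache[id].get_or_insert_with(|| {
            vec!["()", "{}"] // stand-in for the tree-sitter bracket query over this chunk
        });
        out.insert(chunk.clone(), brackets.clone());
    }
    out
}

fn main() {
    let chunks = vec![0..50, 50..100];
    let mut cache = vec![None, None];
    let known: HashSet<_> = std::iter::once(0..50).collect();
    let fetched = fetch_per_chunk(&chunks, &mut cache, &known);
    assert!(fetched.contains_key(&(50..100)) && !fetched.contains_key(&(0..50)));
}
// --- end of sketch ---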
+ pub fn fetch_bracket_ranges( &self, range: Range, - ) -> impl Iterator + '_ { - let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| { - grammar.brackets_config.as_ref().map(|c| &c.query) - }); - let configs = matches - .grammars() - .iter() - .map(|grammar| grammar.brackets_config.as_ref().unwrap()) - .collect::>(); + known_chunks: Option<&HashSet>>, + ) -> HashMap, Vec>> { + let mut all_bracket_matches = HashMap::default(); + + for chunk in self + .tree_sitter_data + .chunks + .applicable_chunks(&[range.to_point(self)]) + { + if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) { + continue; + } + let chunk_range = chunk.anchor_range(); + let chunk_range = chunk_range.to_offset(&self); + + if let Some(cached_brackets) = + &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id] + { + all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone()); + continue; + } + + let mut all_brackets = Vec::new(); + let mut opens = Vec::new(); + let mut color_pairs = Vec::new(); + + let mut matches = self.syntax.matches_with_options( + chunk_range.clone(), + &self.text, + TreeSitterOptions { + max_bytes_to_query: Some(MAX_BYTES_TO_QUERY), + max_start_depth: None, + }, + |grammar| grammar.brackets_config.as_ref().map(|c| &c.query), + ); + let configs = matches + .grammars() + .iter() + .map(|grammar| grammar.brackets_config.as_ref().unwrap()) + .collect::>(); - iter::from_fn(move || { while let Some(mat) = matches.peek() { let mut open = None; let mut close = None; - let config = &configs[mat.grammar_index]; + let syntax_layer_depth = mat.depth; + let config = configs[mat.grammar_index]; let pattern = &config.patterns[mat.pattern_index]; for capture in mat.captures { if capture.index == config.open_capture_ix { @@ -4148,25 +4417,83 @@ impl BufferSnapshot { }; let bracket_range = open_range.start..=close_range.end; - if !bracket_range.overlaps(&range) { + if !bracket_range.overlaps(&chunk_range) { continue; } - return Some(BracketMatch { - open_range, - close_range, + let index = all_brackets.len(); + all_brackets.push(BracketMatch { + open_range: open_range.clone(), + close_range: close_range.clone(), newline_only: pattern.newline_only, + syntax_layer_depth, + color_index: None, }); + + // Certain languages have "brackets" that are not brackets, e.g. tags. and such + // bracket will match the entire tag with all text inside. + // For now, avoid highlighting any pair that has more than single char in each bracket. + // We need to colorize `` bracket pairs, so cannot make this check stricter. 
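// --- editor's sketch (illustrative, not part of the diff) ---
// The stack-based color assignment used just below: opening brackets are pushed
// in text order, and when a pair closes while its own opener is on top of the
// stack, its color index is the current nesting depth. Offsets here are plain
// usizes rather than buffer offsets; a hedged restatement, not the real code.
fn color_depths(pairs: &[(usize, usize)]) -> Vec<Option<usize>> {
    let mut opens: Vec<usize> = pairs.iter().map(|&(open, _)| open).collect();
    opens.sort_unstable();

    let mut by_close: Vec<(usize, usize, usize)> = pairs
        .iter()
        .enumerate()
        .map(|(index, &(open, close))| (open, close, index))
        .collect();
    by_close.sort_unstable_by_key(|&(_, close, _)| close);

    let mut depths = vec![None; pairs.len()];
    let mut stack = Vec::new();
    let mut next_open = 0;
    for (open, close, index) in by_close {
        while next_open < opens.len() && opens[next_open] < close {
            stack.push(opens[next_open]);
            next_open += 1;
        }
        if stack.last() == Some(&open) {
            depths[index] = Some(stack.len() - 1);
            stack.pop();
        }
    }
    depths
}

fn main() {
    // "( [ ] )": the outer pair gets color depth 0, the inner pair depth 1.
    assert_eq!(color_depths(&[(0, 3), (1, 2)]), vec![Some(0), Some(1)]);
}
// --- end of sketch ---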
+ let should_color = + !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1); + if should_color { + opens.push(open_range.clone()); + color_pairs.push((open_range, close_range, index)); + } } - None - }) + + opens.sort_by_key(|r| (r.start, r.end)); + opens.dedup_by(|a, b| a.start == b.start && a.end == b.end); + color_pairs.sort_by_key(|(_, close, _)| close.end); + + let mut open_stack = Vec::new(); + let mut open_index = 0; + for (open, close, index) in color_pairs { + while open_index < opens.len() && opens[open_index].start < close.start { + open_stack.push(opens[open_index].clone()); + open_index += 1; + } + + if open_stack.last() == Some(&open) { + let depth_index = open_stack.len() - 1; + all_brackets[index].color_index = Some(depth_index); + open_stack.pop(); + } + } + + all_brackets.sort_by_key(|bracket_match| { + (bracket_match.open_range.start, bracket_match.open_range.end) + }); + + if let empty_slot @ None = + &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id] + { + *empty_slot = Some(all_brackets.clone()); + } + all_bracket_matches.insert(chunk.row_range(), all_brackets); + } + + all_bracket_matches + } + + pub fn all_bracket_ranges( + &self, + range: Range, + ) -> impl Iterator> { + self.fetch_bracket_ranges(range.clone(), None) + .into_values() + .flatten() + .filter(move |bracket_match| { + let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end; + bracket_range.overlaps(&range) + }) } /// Returns bracket range pairs overlapping or adjacent to `range` pub fn bracket_ranges( &self, range: Range, - ) -> impl Iterator + '_ { + ) -> impl Iterator> + '_ { // Find bracket pairs that *inclusively* contain the given range. let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self); self.all_bracket_ranges(range) @@ -4312,11 +4639,19 @@ impl BufferSnapshot { pub fn enclosing_bracket_ranges( &self, range: Range, - ) -> impl Iterator + '_ { + ) -> impl Iterator> + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.bracket_ranges(range.clone()).filter(move |pair| { - pair.open_range.start <= range.start && pair.close_range.end >= range.end + let result: Vec<_> = self.bracket_ranges(range.clone()).collect(); + let max_depth = result + .iter() + .map(|mat| mat.syntax_layer_depth) + .max() + .unwrap_or(0); + result.into_iter().filter(move |pair| { + pair.open_range.start <= range.start + && pair.close_range.end >= range.end + && pair.syntax_layer_depth == max_depth }) } @@ -4803,6 +5138,7 @@ impl Clone for BufferSnapshot { remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), language: self.language.clone(), + tree_sitter_data: self.tree_sitter_data.clone(), non_text_state_update_count: self.non_text_state_update_count, } } @@ -5117,6 +5453,7 @@ impl Default for Diagnostic { is_unnecessary: false, underline: true, data: None, + registration_id: None, } } } diff --git a/crates/language/src/buffer/row_chunk.rs b/crates/language/src/buffer/row_chunk.rs new file mode 100644 index 0000000000000000000000000000000000000000..0f3c0b5afb1cc1a2d60a2a568fe00403733ef5c6 --- /dev/null +++ b/crates/language/src/buffer/row_chunk.rs @@ -0,0 +1,119 @@ +//! A row chunk is an exclusive range of rows, [`BufferRow`] within a buffer of a certain version, [`Global`]. +//! All but the last chunk are of a constant, given size. 
+ +use std::{ops::Range, sync::Arc}; + +use text::{Anchor, OffsetRangeExt as _, Point}; +use util::RangeExt; + +use crate::BufferRow; + +/// A range of rows, exclusive as [`lsp::Range`] and +/// +/// denote. +/// +/// Represents an area in a text editor, adjacent to other ones. +/// Together, chunks form the entire document at a particular version [`Global`]. +/// Each chunk is queried for inlays as `(start_row, 0)..(end_exclusive, 0)` via +/// +#[derive(Clone)] +pub struct RowChunks { + chunks: Arc<[RowChunk]>, + version: clock::Global, +} + +impl std::fmt::Debug for RowChunks { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RowChunks") + .field("chunks", &self.chunks) + .finish() + } +} + +impl RowChunks { + pub fn new(snapshot: text::BufferSnapshot, max_rows_per_chunk: u32) -> Self { + let buffer_point_range = (0..snapshot.len()).to_point(&snapshot); + let last_row = buffer_point_range.end.row; + let chunks = (buffer_point_range.start.row..=last_row) + .step_by(max_rows_per_chunk as usize) + .collect::>(); + let last_chunk_id = chunks.len() - 1; + let chunks = chunks + .into_iter() + .enumerate() + .map(|(id, chunk_start)| { + let start = Point::new(chunk_start, 0); + let end_exclusive = (chunk_start + max_rows_per_chunk).min(last_row); + let end = if id == last_chunk_id { + Point::new(end_exclusive, snapshot.line_len(end_exclusive)) + } else { + Point::new(end_exclusive, 0) + }; + RowChunk { + id, + start: chunk_start, + end_exclusive, + start_anchor: snapshot.anchor_before(start), + end_anchor: snapshot.anchor_after(end), + } + }) + .collect::>(); + Self { + chunks: Arc::from(chunks), + version: snapshot.version().clone(), + } + } + + pub fn version(&self) -> &clock::Global { + &self.version + } + + pub fn len(&self) -> usize { + self.chunks.len() + } + + pub fn applicable_chunks(&self, ranges: &[Range]) -> impl Iterator { + let row_ranges = ranges + .iter() + // Be lenient and yield multiple chunks if they "touch" the exclusive part of the range. + // This will result in LSP hints [re-]queried for more ranges, but also more hints already visible when scrolling around.
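// --- editor's sketch (illustrative, not part of the diff) ---
// A standalone restatement of the partitioning above: rows are split into
// fixed-size, adjacent chunks (50 rows each in the real code), and a query range
// leniently maps to every chunk it touches, including at the exclusive end row.
// Pairs of (start, end_exclusive) stand in for `RowChunk`.
fn chunk_rows(last_row: u32, rows_per_chunk: u32) -> Vec<(u32, u32)> {
    (0..=last_row)
        .step_by(rows_per_chunk as usize)
        .map(|start| (start, (start + rows_per_chunk).min(last_row)))
        .collect()
}

fn chunks_touching(chunks: &[(u32, u32)], query: std::ops::Range<u32>) -> Vec<(u32, u32)> {
    chunks
        .iter()
        .filter(|&&(start, end_exclusive)| query.start <= end_exclusive && start <= query.end)
        .copied()
        .collect()
}

fn main() {
    // A buffer whose last row is 120, split into 50-row chunks.
    let chunks = chunk_rows(120, 50);
    assert_eq!(chunks, vec![(0, 50), (50, 100), (100, 120)]);
    // A query over rows 40..60 touches the first two chunks.
    assert_eq!(chunks_touching(&chunks, 40..60), vec![(0, 50), (50, 100)]);
}
// --- end of sketch ---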
+ .map(|point_range| point_range.start.row..point_range.end.row + 1) + .collect::>(); + self.chunks + .iter() + .filter(move |chunk| -> bool { + let chunk_range = chunk.row_range().to_inclusive(); + row_ranges + .iter() + .any(|row_range| chunk_range.overlaps(&row_range)) + }) + .copied() + } + + pub fn previous_chunk(&self, chunk: RowChunk) -> Option { + if chunk.id == 0 { + None + } else { + self.chunks.get(chunk.id - 1).copied() + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RowChunk { + pub id: usize, + pub start: BufferRow, + pub end_exclusive: BufferRow, + pub start_anchor: Anchor, + pub end_anchor: Anchor, +} + +impl RowChunk { + pub fn row_range(&self) -> Range { + self.start..self.end_exclusive + } + + pub fn anchor_range(&self) -> Range { + self.start_anchor..self.end_anchor + } +} diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 4f3f760ba8b841c45031cd1a811f6bb1e8fd2534..54e2ef4065460547f4a3f86db7d3a3986dff65eb 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -6,6 +6,7 @@ use futures::FutureExt as _; use gpui::{App, AppContext as _, BorrowAppContext, Entity}; use gpui::{HighlightStyle, TestAppContext}; use indoc::indoc; +use pretty_assertions::assert_eq; use proto::deserialize_operation; use rand::prelude::*; use regex::RegexBuilder; @@ -46,8 +47,7 @@ fn test_line_endings(cx: &mut gpui::App) { init_settings(cx, |_| {}); cx.new(|cx| { - let mut buffer = - Buffer::local("one\r\ntwo\rthree", cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local("one\r\ntwo\rthree", cx).with_language(rust_lang(), cx); assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.line_ending(), LineEnding::Windows); @@ -151,7 +151,7 @@ fn test_select_language(cx: &mut App) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); registry.add(Arc::new(Language::new( LanguageConfig { - name: LanguageName::new("Rust"), + name: LanguageName::new_static("Rust"), matcher: LanguageMatcher { path_suffixes: vec!["rs".to_string()], ..Default::default() @@ -173,7 +173,7 @@ fn test_select_language(cx: &mut App) { ))); registry.add(Arc::new(Language::new( LanguageConfig { - name: LanguageName::new("Make"), + name: LanguageName::new_static("Make"), matcher: LanguageMatcher { path_suffixes: vec!["Makefile".to_string(), "mk".to_string()], ..Default::default() @@ -608,7 +608,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) { #[gpui::test] async fn test_reparse(cx: &mut gpui::TestAppContext) { let text = "fn a() {}"; - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); // Wait for the initial text to parse cx.executor().run_until_parked(); @@ -735,7 +735,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) { #[gpui::test] async fn test_resetting_language(cx: &mut gpui::TestAppContext) { let buffer = cx.new(|cx| { - let mut buffer = Buffer::local("{}", cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local("{}", cx).with_language(rust_lang(), cx); buffer.set_sync_parse_timeout(Duration::ZERO); buffer }); @@ -783,29 +783,49 @@ async fn test_outline(cx: &mut gpui::TestAppContext) { "# .unindent(); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); - let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None)); + let 
buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + let outline = snapshot.outline(None); assert_eq!( outline .items .iter() - .map(|item| (item.text.as_str(), item.depth)) + .map(|item| ( + item.text.as_str(), + item.depth, + item.to_point(&snapshot).body_range(&snapshot) + .map(|range| minimize_space(&snapshot.text_for_range(range).collect::())) + )) .collect::>(), &[ - ("struct Person", 0), - ("name", 1), - ("age", 1), - ("mod module", 0), - ("enum LoginState", 1), - ("LoggedOut", 2), - ("LoggingOn", 2), - ("LoggedIn", 2), - ("person", 3), - ("time", 3), - ("impl Eq for Person", 0), - ("impl Drop for Person", 0), - ("fn drop", 1), + ("struct Person", 0, Some("name: String, age: usize,".to_string())), + ("name", 1, None), + ("age", 1, None), + ( + "mod module", + 0, + Some( + "enum LoginState { LoggedOut, LoggingOn, LoggedIn { person: Person, time: Instant, } }".to_string() + ) + ), + ( + "enum LoginState", + 1, + Some("LoggedOut, LoggingOn, LoggedIn { person: Person, time: Instant, }".to_string()) + ), + ("LoggedOut", 2, None), + ("LoggingOn", 2, None), + ("LoggedIn", 2, Some("person: Person, time: Instant,".to_string())), + ("person", 3, None), + ("time", 3, None), + ("impl Eq for Person", 0, Some("".to_string())), + ( + "impl Drop for Person", + 0, + Some("fn drop(&mut self) { println!(\"bye\"); }".to_string()) + ), + ("fn drop", 1, Some("println!(\"bye\");".to_string())), ] ); @@ -840,6 +860,11 @@ async fn test_outline(cx: &mut gpui::TestAppContext) { ] ); + fn minimize_space(text: &str) -> String { + static WHITESPACE: LazyLock = LazyLock::new(|| Regex::new("[\\n\\s]+").unwrap()); + WHITESPACE.replace_all(text, " ").trim().to_string() + } + async fn search<'a>( outline: &'a Outline, query: &'a str, @@ -865,7 +890,7 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) { "# .unindent(); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None)); assert_eq!( @@ -945,7 +970,7 @@ fn test_outline_annotations(cx: &mut App) { "# .unindent(); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let outline = buffer.update(cx, |buffer, _| buffer.snapshot().outline(None)); assert_eq!( @@ -993,7 +1018,7 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) { "# .unindent(); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); // point is at the start of an item @@ -1068,7 +1093,7 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) { " .unindent(), ); - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); // note, it would be nice to actually return the method test in this @@ -1087,8 +1112,7 @@ fn test_text_objects(cx: &mut App) { false, ); - let buffer = - cx.new(|cx| Buffer::local(text.clone(), cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = 
cx.new(|cx| Buffer::local(text.clone(), cx).with_language(rust_lang(), cx)); let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); let matches = snapshot @@ -1105,15 +1129,24 @@ fn test_text_objects(cx: &mut App) { "fn say() -> u8 { return /* hi */ 1 }", TextObject::AroundFunction ), + ( + "fn say() -> u8 { return /* hi */ 1 }", + TextObject::InsideClass + ), + ( + "impl Hello {\n fn say() -> u8 { return /* hi */ 1 }\n}", + TextObject::AroundClass + ), ], ) } #[gpui::test] fn test_enclosing_bracket_ranges(cx: &mut App) { - let mut assert = |selection_text, range_markers| { + #[track_caller] + fn assert(selection_text: &'static str, range_markers: Vec<&'static str>, cx: &mut App) { assert_bracket_pairs(selection_text, range_markers, rust_lang(), cx) - }; + } assert( indoc! {" @@ -1130,6 +1163,7 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } «}» let foo = 1;"}], + cx, ); assert( @@ -1156,6 +1190,7 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } let foo = 1;"}, ], + cx, ); assert( @@ -1182,6 +1217,7 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } let foo = 1;"}, ], + cx, ); assert( @@ -1199,6 +1235,7 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } «}» let foo = 1;"}], + cx, ); assert( @@ -1209,7 +1246,8 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } } let fˇoo = 1;"}, - vec![], + Vec::new(), + cx, ); // Regression test: avoid crash when querying at the end of the buffer. @@ -1221,14 +1259,20 @@ fn test_enclosing_bracket_ranges(cx: &mut App) { } } let foo = 1;ˇ"}, - vec![], + Vec::new(), + cx, ); } #[gpui::test] fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &mut App) { let mut assert = |selection_text, bracket_pair_texts| { - assert_bracket_pairs(selection_text, bracket_pair_texts, javascript_lang(), cx) + assert_bracket_pairs( + selection_text, + bracket_pair_texts, + Arc::new(javascript_lang()), + cx, + ) }; assert( @@ -1261,7 +1305,7 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: & fn test_range_for_syntax_ancestor(cx: &mut App) { cx.new(|cx| { let text = "fn a() { b(|c| {}) }"; - let buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); let snapshot = buffer.snapshot(); assert_eq!( @@ -1313,7 +1357,7 @@ fn test_autoindent_with_soft_tabs(cx: &mut App) { cx.new(|cx| { let text = "fn a() {}"; - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx); assert_eq!(buffer.text(), "fn a() {\n \n}"); @@ -1355,7 +1399,7 @@ fn test_autoindent_with_hard_tabs(cx: &mut App) { cx.new(|cx| { let text = "fn a() {}"; - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx); assert_eq!(buffer.text(), "fn a() {\n\t\n}"); @@ -1404,7 +1448,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut App) .unindent(), cx, ) - .with_language(Arc::new(rust_lang()), cx); + .with_language(rust_lang(), cx); // Lines 2 and 3 don't match the indentation suggestion. When editing these lines, // their indentation is not adjusted. 
@@ -1545,7 +1589,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut App) .unindent(), cx, ) - .with_language(Arc::new(rust_lang()), cx); + .with_language(rust_lang(), cx); // Insert a closing brace. It is outdented. buffer.edit_via_marked_text( @@ -1608,7 +1652,7 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap .unindent(), cx, ) - .with_language(Arc::new(rust_lang()), cx); + .with_language(rust_lang(), cx); // Regression test: line does not get outdented due to syntax error buffer.edit_via_marked_text( @@ -1667,7 +1711,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut App) { .unindent(), cx, ) - .with_language(Arc::new(rust_lang()), cx); + .with_language(rust_lang(), cx); buffer.edit_via_marked_text( &" @@ -1717,7 +1761,7 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut App) { cx.new(|cx| { let text = "a\nb"; - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); buffer.edit( [(0..1, "\n"), (2..3, "\n")], Some(AutoindentMode::EachLine), @@ -1743,7 +1787,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut App) { " .unindent(); - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); buffer.edit( [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")], Some(AutoindentMode::EachLine), @@ -1780,7 +1824,7 @@ fn test_autoindent_block_mode(cx: &mut App) { } "# .unindent(); - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); // When this text was copied, both of the quotation marks were at the same // indent level, but the indentation of the first line was not included in @@ -1863,7 +1907,7 @@ fn test_autoindent_block_mode_with_newline(cx: &mut App) { } "# .unindent(); - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); // First line contains just '\n', it's indentation is stored in "original_indent_columns" let original_indent_columns = vec![Some(4)]; @@ -1915,7 +1959,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut App) { } "# .unindent(); - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); // The original indent columns are not known, so this text is // auto-indented in a block as if the first line was copied in @@ -2006,7 +2050,7 @@ fn test_autoindent_block_mode_multiple_adjacent_ranges(cx: &mut App) { false, ); - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); buffer.edit( [ @@ -2020,7 +2064,7 @@ fn test_autoindent_block_mode_multiple_adjacent_ranges(cx: &mut App) { cx, ); - pretty_assertions::assert_eq!( + assert_eq!( buffer.text(), " mod numbers { @@ -2214,7 +2258,7 @@ async fn test_async_autoindents_preserve_preview(cx: &mut TestAppContext) { // Then we request that a preview tab be preserved for the new version, even though it's edited. 
let buffer = cx.new(|cx| { let text = "fn a() {}"; - let mut buffer = Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx); + let mut buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); // This causes autoindent to be async. buffer.set_sync_parse_timeout(Duration::ZERO); @@ -2672,7 +2716,7 @@ fn test_language_at_with_hidden_languages(cx: &mut App) { .unindent(); let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); - language_registry.add(Arc::new(markdown_lang())); + language_registry.add(markdown_lang()); language_registry.add(Arc::new(markdown_inline_lang())); let mut buffer = Buffer::local(text, cx); @@ -2714,9 +2758,9 @@ fn test_language_at_for_markdown_code_block(cx: &mut App) { .unindent(); let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); - language_registry.add(Arc::new(markdown_lang())); + language_registry.add(markdown_lang()); language_registry.add(Arc::new(markdown_inline_lang())); - language_registry.add(Arc::new(rust_lang())); + language_registry.add(rust_lang()); let mut buffer = Buffer::local(text, cx); buffer.set_language_registry(language_registry.clone()); @@ -3113,7 +3157,7 @@ async fn test_preview_edits(cx: &mut TestAppContext) { cx: &mut TestAppContext, assert_fn: impl Fn(HighlightedText), ) { - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx)); let edits = buffer.read_with(cx, |buffer, _| { edits .into_iter() @@ -3420,7 +3464,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { for buffer in &buffers { let buffer = buffer.read(cx).snapshot(); let actual_remote_selections = buffer - .selections_in_range(Anchor::MIN..Anchor::MAX, false) + .selections_in_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), false) .map(|(replica_id, _, _, selections)| (replica_id, selections.collect::>())) .collect::>(); let expected_remote_selections = active_selections @@ -3455,6 +3499,25 @@ fn test_contiguous_ranges() { ); } +#[gpui::test] +fn test_insertion_after_deletion(cx: &mut gpui::App) { + let buffer = cx.new(|cx| Buffer::local("struct Foo {\n \n}", cx)); + buffer.update(cx, |buffer, cx| { + let mut anchor = buffer.anchor_after(17); + buffer.edit([(12..18, "")], None, cx); + let snapshot = buffer.snapshot(); + assert_eq!(snapshot.text(), "struct Foo {}"); + if !anchor.is_valid(&snapshot) { + anchor = snapshot.anchor_after(snapshot.offset_for_anchor(&anchor)); + } + buffer.edit([(anchor..anchor, "\n")], None, cx); + buffer.edit([(anchor..anchor, "field1:")], None, cx); + buffer.edit([(anchor..anchor, " i32,")], None, cx); + let snapshot = buffer.snapshot(); + assert_eq!(snapshot.text(), "struct Foo {\nfield1: i32,}"); + }) +} + #[gpui::test(iterations = 500)] fn test_trailing_whitespace_ranges(mut rng: StdRng) { // Generate a random multi-line string containing @@ -3505,7 +3568,7 @@ let word=öäpple.bar你 Öäpple word2-öÄpPlE-Pizza-word ÖÄPPLE word "#; let buffer = cx.new(|cx| { - let buffer = Buffer::local(contents, cx).with_language(Arc::new(rust_lang()), cx); + let buffer = Buffer::local(contents, cx).with_language(rust_lang(), cx); assert_eq!(buffer.text(), contents); buffer.check_invariants(); buffer @@ -3665,7 +3728,7 @@ fn ruby_lang() -> Language { fn html_lang() -> Language { Language::new( LanguageConfig { - name: LanguageName::new("HTML"), + name: LanguageName::new_static("HTML"), block_comment: Some(BlockCommentConfig { 
start: " ‹«/👉例/Cool Spaces»›"); + test_path!(" ::: ‹«/例👈/Cool Spaces»›"); + test_path!(" --> ‹«/👉例/Cool Spaces»:«4»:«2»›"); + test_path!(" ::: ‹«/例👈/Cool Spaces»(«4»,«2»)›"); + test_path!(" panicked at ‹«/👉例/Cool Spaces»:«4»:«2»›:"); + test_path!(" panicked at ‹«/例👈/Cool Spaces»(«4»,«2»)›:"); + test_path!(" at ‹«/👉例/Cool Spaces»:«4»:«2»›"); + test_path!(" at ‹«/例👈/Cool Spaces»(«4»,«2»)›"); + // Python test_path!("‹«👉例wesome.py»›"); test_path!("‹«例👈wesome.py»›"); @@ -624,7 +846,14 @@ mod tests { } #[test] - #[should_panic(expected = "No hyperlink found")] + // + fn issue_12338_regex() { + // Issue #12338 + test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«'test file 👉1.txt'»›"); + test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«👉'test file 1.txt'»›"); + } + + #[test] // fn issue_12338() { // Issue #12338 @@ -658,30 +887,45 @@ mod tests { test_path!(" ‹File \"«/🏃👈wesome.🔥»\", line «42»›: Wat?"); } + #[test] + // + fn issue_40202() { + // Elixir + test_path!("[‹«lib/blitz_apex_👉server/stats/aggregate_rank_stats.ex»:«35»›: BlitzApexServer.Stats.AggregateRankStats.update/2] + 1 #=> 1"); + } + + #[test] + // + fn issue_28194() { + test_path!( + "‹«test/c👉ontrollers/template_items_controller_test.rb»:«20»›:in 'block (2 levels) in '" + ); + } + #[test] #[cfg_attr( not(target_os = "windows"), should_panic( - expected = "Path = «test/controllers/template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)" + expected = "Path = «/test/cool.rs:4:NotDesc», at grid cells (0, 1)..=(7, 2)" ) )] #[cfg_attr( target_os = "windows", should_panic( - expected = r#"Path = «test\\controllers\\template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)"# + expected = r#"Path = «C:\\test\\cool.rs:4:NotDesc», at grid cells (0, 1)..=(8, 1)"# ) )] - // - // - // #28194 was closed, but the link includes the description part (":in" here), which - // seems wrong... - fn issue_28194() { - test_path!( - "‹«test/c👉ontrollers/template_items_controller_test.rb»:«20»›:in 'block (2 levels) in '" - ); - test_path!( - "‹«test/controllers/template_items_controller_test.rb»:«19»›:i👉n 'block in '" - ); + // PathWithPosition::parse_str considers "/test/co👉ol.rs:4:NotDesc" invalid input, but + // still succeeds and truncates the part after the position. Ideally this would be + // parsed as the path "/test/co👉ol.rs:4:NotDesc" with no position. 
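// --- editor's sketch (illustrative, not part of the diff) ---
// The two position-suffix shapes these terminal hyperlink tests exercise,
// `path:row:col` and `path(row,col)`, reduced to a standalone splitter. This is
// a hedged illustration, not the real `PathWithPosition` parser the tests rely on.
fn split_position(word: &str) -> (&str, Option<(u32, u32)>) {
    // `path(row,col)` form, e.g. "/test/cool.rs(4,2)".
    if let Some(open) = word.rfind('(') {
        if let Some(inner) = word[open + 1..].strip_suffix(')') {
            if let Some((row, col)) = inner.split_once(',') {
                if let (Ok(row), Ok(col)) = (row.parse(), col.parse()) {
                    return (&word[..open], Some((row, col)));
                }
            }
        }
    }
    // `path:row:col` form, e.g. "/test/cool.rs:4:2".
    let mut parts = word.rsplitn(3, ':');
    if let (Some(col), Some(row), Some(path)) = (parts.next(), parts.next(), parts.next()) {
        if let (Ok(row), Ok(col)) = (row.parse(), col.parse()) {
            return (path, Some((row, col)));
        }
    }
    (word, None)
}

fn main() {
    assert_eq!(split_position("/test/cool.rs:4:2"), ("/test/cool.rs", Some((4, 2))));
    assert_eq!(split_position("/test/cool.rs(4,2)"), ("/test/cool.rs", Some((4, 2))));
    assert_eq!(split_position("rust-toolchain.toml"), ("rust-toolchain.toml", None));
}
// --- end of sketch ---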
+ fn path_with_position_parse_str() { + test_path!("`‹«/test/co👉ol.rs:4:NotDesc»›`"); + test_path!("<‹«/test/co👉ol.rs:4:NotDesc»›>"); + + test_path!("'‹«(/test/co👉ol.rs:4:2)»›'"); + test_path!("'‹«(/test/co👉ol.rs(4))»›'"); + test_path!("'‹«(/test/co👉ol.rs(4,2))»›'"); } } @@ -715,35 +959,38 @@ mod tests { test_path!("‹«/👉test/cool.rs(1,618033988749)»›"); } - #[test] - #[should_panic(expected = "Path = «»")] - fn colon_suffix_succeeds_in_finding_an_empty_maybe_path() { - test_path!("‹«/test/cool.rs»:«4»:«2»›👉:", "What is this?"); - test_path!("‹«/test/cool.rs»(«4»,«2»)›👉:", "What is this?"); - } - #[test] #[cfg_attr( not(target_os = "windows"), - should_panic(expected = "Path = «/test/cool.rs»") + should_panic(expected = "Path = «/te:st/co:ol.r:s:4:2::::::»") )] #[cfg_attr( target_os = "windows", - should_panic(expected = r#"Path = «C:\\test\\cool.rs»"#) + should_panic(expected = r#"Path = «C:\\te:st\\co:ol.r:s:4:2::::::»"#) )] fn many_trailing_colons_should_be_parsed_as_part_of_the_path() { - test_path!("‹«/test/cool.rs:::👉:»›"); test_path!("‹«/te:st/👉co:ol.r:s:4:2::::::»›"); + test_path!("/test/cool.rs:::👉:"); } } - #[cfg(target_os = "windows")] mod windows { // Lots of fun to be had with long file paths (verbatim) and UNC paths on Windows. // See // See // See + #[test] + fn default_prompts() { + // Windows command prompt + test_path!(r#"‹«C:\Users\someone\👉test»›>"#); + test_path!(r#"C:\Users\someone\test👉>"#); + + // Windows PowerShell + test_path!(r#"PS ‹«C:\Users\someone\👉test\cool.rs»›>"#); + test_path!(r#"PS C:\Users\someone\test\cool.rs👉>"#); + } + #[test] fn unc() { test_path!(r#"‹«\\server\share\👉test\cool.rs»›"#); @@ -752,29 +999,234 @@ mod tests { mod issues { #[test] - #[should_panic( - expected = r#"Path = «C:\\test\\cool.rs», at grid cells (0, 0)..=(6, 0)"# - )] fn issue_verbatim() { test_path!(r#"‹«\\?\C:\👉test\cool.rs»›"#); test_path!(r#"‹«\\?\C:\test\cool👉.rs»›"#); } #[test] - #[should_panic( - expected = r#"Path = «\\\\server\\share\\test\\cool.rs», at grid cells (0, 0)..=(10, 2)"# - )] fn issue_verbatim_unc() { test_path!(r#"‹«\\?\UNC\server\share\👉test\cool.rs»›"#); test_path!(r#"‹«\\?\UNC\server\share\test\cool👉.rs»›"#); } } } + + mod perf { + use super::super::*; + use crate::TerminalSettings; + use alacritty_terminal::{ + event::VoidListener, + grid::Scroll, + index::{Column, Point as AlacPoint}, + term::test::mock_term, + term::{Term, search::Match}, + }; + use settings::{self, Settings, SettingsContent}; + use std::{cell::RefCell, rc::Rc}; + use util_macros::perf; + + fn build_test_term( + line: &str, + repeat: usize, + hover_offset_column: usize, + ) -> (Term, AlacPoint) { + let content = line.repeat(repeat); + let mut term = mock_term(&content); + term.resize(TermSize { + columns: 1024, + screen_lines: 10, + }); + term.scroll_display(Scroll::Top); + let point = + AlacPoint::new(Line(term.topmost_line().0 + 3), Column(hover_offset_column)); + (term, point) + } + + #[perf] + pub fn cargo_hyperlink_benchmark() { + const LINE: &str = " Compiling terminal v0.1.0 (/Hyperlinks/Bench/Source/zed-hyperlinks/crates/terminal)\r\n"; + thread_local! 
{ + static TEST_TERM_AND_POINT: (Term, AlacPoint) = + build_test_term(LINE, 500, 50); + } + TEST_TERM_AND_POINT.with(|(term, point)| { + assert_eq!( + find_from_grid_point_bench(term, *point) + .map(|(path, ..)| path) + .unwrap_or_default(), + "/Hyperlinks/Bench/Source/zed-hyperlinks/crates/terminal", + "Hyperlink should have been found" + ); + }); + } + + #[perf] + pub fn rust_hyperlink_benchmark() { + const LINE: &str = " --> /Hyperlinks/Bench/Source/zed-hyperlinks/crates/terminal/terminal.rs:1000:42\r\n"; + thread_local! { + static TEST_TERM_AND_POINT: (Term, AlacPoint) = + build_test_term(LINE, 500, 50); + } + TEST_TERM_AND_POINT.with(|(term, point)| { + assert_eq!( + find_from_grid_point_bench(term, *point) + .map(|(path, ..)| path) + .unwrap_or_default(), + "/Hyperlinks/Bench/Source/zed-hyperlinks/crates/terminal/terminal.rs:1000:42", + "Hyperlink should have been found" + ); + }); + } + + #[perf] + pub fn ls_hyperlink_benchmark() { + const LINE: &str = "Cargo.toml experiments notebooks rust-toolchain.toml tooling\r\n"; + thread_local! { + static TEST_TERM_AND_POINT: (Term, AlacPoint) = + build_test_term(LINE, 500, 60); + } + TEST_TERM_AND_POINT.with(|(term, point)| { + assert_eq!( + find_from_grid_point_bench(term, *point) + .map(|(path, ..)| path) + .unwrap_or_default(), + "rust-toolchain.toml", + "Hyperlink should have been found" + ); + }); + } + + #[perf] + // https://github.com/zed-industries/zed/pull/44407 + pub fn pr_44407_hyperlink_benchmark() { + const LINE: &str = "-748, 706, 163, 222, -980, 949, 381, -568, 199, 501, 760, -821, 90, -451, 183, 867, -351, -810, -762, -109, 423, 84, 14, -77, -820, -345, 74, -791, 930, -618, -900, 862, -959, 289, -19, 471, -757, 793, 155, -554, 249, 830, 402, 732, -731, -866, -720, -703, -257, -439, 731, 872, -489, 676, -167, 613, -698, 415, -80, -453, -896, 333, -511, 621, -450, 624, -309, -575, 177, 141, 891, -104, -97, -367, -599, -675, 607, -225, -760, 552, -465, 804, 55, 282, 104, -929, -252,\ +-311, 900, 550, 599, -80, 774, 553, 837, -395, 541, 953, 154, -396, -596, -111, -802, -221, -337, -633, -73, -527, -82, -658, -264, 222, 375, 434, 204, -756, -703, 303, 239, -257, -365, -351, 904, 364, -743, -484, 655, -542, 446, 888, 632, -167, -260, 716, 150, 806, 723, 513, -118, -323, -683, 983, -564, 358, -16, -287, 277, -607, 87, 365, -1, 164, 401, 257, 369, -893, 145, -969, 375, -53, 541, -408, -865, 753, 258, 337, -886, 593, -378, -528, 191, 204, 566, -61, -621, 769, 524, -628, 6,\ +249, 896, -785, -776, 321, -681, 604, -740, 886, 426, -480, -983, 23, -247, 125, -666, 913, 842, -460, -797, -483, -58, -565, -587, -206, 197, 715, 764, -97, 457, -149, -226, 261, 194, -390, 431, 180, -778, 829, -657, -668, 397, 859, 152, -178, 677, -18, 687, -247, 96, 466, -572, 478, 622, -143, -25, -471, 265, 335, 957, 152, -951, -647, 670, 57, 152, -115, 206, 87, 629, -798, -125, -725, -31, 844, 398, -876, 44, 963, -211, 518, -8, -103, -999, 948, 823, 149, -803, 769, -236, -683, 527,\ +-108, -36, 18, -437, 687, -305, -526, 972, -965, 276, 420, -259, -379, -142, -747, 600, -578, 197, 673, 890, 324, -931, 755, -765, -422, 785, -369, -110, -505, 532, -208, -438, 713, 110, 853, 996, -360, 823, 289, -699, 629, -661, 560, -329, -323, 439, 571, -537, 644, -84, 25, -536, -161, 112, 169, -922, -537, -734, -423, 37, 451, -149, 408, 18, -672, 206, -784, 444, 593, -241, 502, -259, -798, -352, -658, 712, -675, -734, 627, -620, 64, -554, 999, -537, -160, -641, 464, 894, 29, 322, 566,\ +-510, -749, 982, 204, 967, -261, -986, -136, 251, -598, 995, -831, 891, 22, 761, -783, 
-415, 125, 470, -919, -97, -668, 85, 205, -175, -550, 502, 652, -468, 798, 775, -216, 89, -433, -24, -621, 877, -126, 951, 809, 782, 156, -618, -841, -463, 19, -723, -904, 550, 263, 991, -758, -114, 446, -731, -623, -634, 462, 48, 851, 333, -846, 480, 892, -966, -910, -436, 317, -711, -341, -294, 124, 238, -214, -281, 467, -950, -342, 913, -90, -388, -573, 740, -883, -451, 493, -500, 863, 930, 127, 530,\ +-810, 540, 541, -664, -951, -227, -420, -476, -581, -534, 549, 253, 984, -985, -84, -521, 538, 484, -440, 371, 784, -306, -850, 530, -133, 251, -799, 446, -170, -243, -674, 769, 646, 778, -680, -714, -442, 804, 901, -774, 69, 307, -293, 755, 443, 224, -918, -771, 723, 40, 132, 568, -847, -47, 844, 69, 986, -293, -459, 313, 155, 331, 69, 280, -637, 569, 104, -119, -988, 252, 857, -590, 810, -891, 484, 566, -934, -587, -290, 566, 587, 489, 870, 280, 454, -252, 613, -701, -278, 195, -198,\ +683, 533, -372, 707, -152, 371, 866, 609, -5, -372, -30, -694, 552, 192, 452, -663, 350, -985, 10, 884, 813, -592, -331, -470, 711, -941, 928, 379, -339, 220, 999, 376, 507, 179, 916, 84, 104, 392, 192, 299, -860, 218, -698, -919, -452, 37, 850, 5, -874, 287, 123, -746, -575, 776, -909, 118, 903, -275, 450, -996, -591, -920, -850, 453, -896, 73, 83, -535, -20, 287, -765, 442, 808, 45, 445, 202, 917, -208, 783, 790, -534, 373, -129, 556, -757, -69, 459, -163, -59, 265, -563, -889, 635,\ +-583, -261, -790, 799, 826, 953, 85, 619, 334, 842, 672, -869, -4, -833, 315, 942, -524, 579, 926, 628, -404, 128, -629, 161, 568, -117, -526, 223, -876, 906, 176, -549, -317, 381, 375, -801, -416, 647, 335, 253, -386, -375, -254, 635, 352, 317, 398, -422, 111, 201, 220, 554, -972, 853, 378, 956, 942, -857, -289, -333, -180, 488, -814, -42, -595, 721, 39, 644, 721, -242, -44, 643, -457, -419, 560, -863, 974, 458, 222, -882, 526, -243, -318, -343, -707, -401, 117, 677, -489, 546, -903,\ +-960, -881, -684, 125, -928, -995, -692, -773, 647, -718, -862, -814, 671, 664, -130, -856, -674, 653, 711, 194, -685, -160, 138, -27, -128, -671, -242, 526, 494, -674, 424, -921, -778, 313, -237, 332, 913, 252, 808, -936, 289, 755, 52, -139, 57, -19, -827, -775, -561, -14, 107, -84, 622, -303, -747, 258, -942, 290, 211, -919, -207, 797, 95, 794, -830, -181, -788, 757, 75, -946, -949, -988, 152, 340, 732, 886, -891, -642, -666, 321, -910, 841, 632, 298, 55, -349, 498, 287, -711, 97, 305,\ +-974, -987, 790, -64, 605, -583, -821, 345, 887, -861, 548, 894, 288, 452, 556, -448, 813, 420, 545, 967, 127, -947, 19, -314, -607, -513, -851, 254, -290, -938, -783, -93, 474, 368, -485, -935, -539, 81, 404, -283, 779, 345, -164, 53, 563, -771, 911, -323, 522, -998, 315, 415, 460, 58, -541, -878, -152, -886, 201, -446, -810, 549, -142, -575, -632, 521, 549, 209, -681, 998, 798, -611, -919, -708, -4, 677, -172, 588, 750, -435, 508, 609, 498, -535, -691, -738, 85, 615, 705, 169, 425,\ +-669, -491, -783, 73, -847, 228, -981, -812, -229, 950, -904, 175, -438, 632, -556, 910, 173, 576, -751, -53, -169, 635, 607, -944, -13, -84, 105, -644, 984, 935, 259, -445, 620, -405, 832, 167, 114, 209, -181, -944, -496, 693, -473, 137, 38, -873, -334, -353, -57, 397, 944, 698, 811, -401, 712, -667, 905, 276, -653, 368, -543, -349, 414, 287, 894, 935, 461, 55, 741, -623, -660, -773, 617, 834, 278, -121, 52, 495, -855, -440, -210, -99, 279, -661, 540, 934, 540, 784, 895, 268, -503, 513,\ +-484, -352, 528, 341, -451, 885, -71, 799, -195, -885, -585, -233, 92, 453, 994, 464, 694, 190, -561, -116, 675, -775, -236, 556, -110, -465, 77, -781, 507, -960, -410, 229, -632, 717, 
597, 429, 358, -430, -692, -825, 576, 571, 758, -891, 528, -267, 190, -869, 132, -811, 796, 750, -596, -681, 870, 360, 969, 860, -412, -567, 694, -86, -498, 38, -178, -583, -778, 412, 842, -586, 722, -192, 350, 363, 81, -677, -163, 564, 543, 671, 110, 314, 739, -552, -224, -644, 922, 685, 134, 613, 793,\ +-363, -244, -284, -257, -561, 418, 988, 333, 110, -966, 790, 927, 536, -620, -309, -358, 895, -867, -796, -357, 308, -740, 287, -732, -363, -969, 658, 711, 511, 256, 590, -574, 815, -845, -84, 546, -581, -71, -334, -890, 652, -959, 320, -236, 445, -851, 825, -756, -4, 877, 308, 573, -117, 293, 686, -483, 391, 342, -550, -982, 713, 886, 552, 474, -673, 283, -591, -383, 988, 435, -131, 708, -326, -884, 87, 680, -818, -408, -486, 813, -307, -799, 23, -497, 802, -146, -100, 541, 7, -493, 577,\ +50, -270, 672, 834, 111, -788, 247, 337, 628, -33, -964, -519, 683, 54, -703, 633, -127, -448, 759, -975, 696, 2, -870, -760, 67, 696, 306, 750, 615, 155, -933, -568, 399, 795, 164, -460, 205, 439, -526, -691, 35, -136, -481, -63, 73, -598, 748, 133, 874, -29, 4, -73, 472, 389, 962, 231, -328, 240, 149, 959, 46, -207, 72, -514, -608, 0, -14, 32, 374, -478, -806, 919, -729, -286, 652, 109, 509, -879, -979, -865, 584, -92, -346, -992, 781, 401, 575, 993, -746, -33, 684, -683, 750, -105,\ +-425, -508, -627, 27, 770, -45, 338, 921, -139, -392, -933, 634, 563, 224, -780, 921, 991, 737, 22, 64, 414, -249, -687, 869, 50, 759, -97, 515, 20, -775, -332, 957, 138, -542, -835, 591, -819, 363, -715, -146, -950, -641, -35, -435, -407, -548, -984, 383, -216, -559, 853, 4, -410, -319, -831, -459, -628, -819, -324, 755, 696, -192, 238, -234, -724, -445, 915, 302, -708, 484, 224, -641, 25, -771, 528, -106, -744, -588, 913, -554, -515, -239, -843, -812, -171, 721, 543, -269, 440, 151,\ +996, -723, -557, -522, -280, -514, -593, 208, 715, 404, 353, 270, -483, -785, 318, -313, 798, 638, 764, 748, -929, -827, -318, -56, 389, -546, -958, -398, 463, -700, 461, 311, -787, -488, 877, 456, 166, 535, -995, -189, -715, 244, 40, 484, 212, -329, -351, 638, -69, -446, -292, 801, -822, 490, -486, -185, 790, 370, -340, 401, -656, 584, 561, -749, 269, -19, -294, -111, 975, 874, -73, 851, 231, -331, -684, 460, 765, -654, -76, 10, 733, 520, 521, 416, -958, -202, -186, -167, 175, 343, -50,\ +673, -763, -854, -977, -17, -853, -122, -25, 180, 149, 268, 874, -816, -745, 747, -303, -959, 390, 509, 18, -66, 275, -277, 9, 837, -124, 989, -542, -649, -845, 894, 926, 997, -847, -809, -579, -96, -372, 766, 238, -251, 503, 559, 276, -281, -102, -735, 815, 109, 175, -10, 128, 543, -558, -707, 949, 996, -422, -506, 252, 702, -930, 552, -961, 584, -79, -177, 341, -275, 503, -21, 677, -545, 8, -956, -795, -870, -254, 170, -502, -880, 106, 174, 459, 603, -600, -963, 164, -136, -641, -309,\ +-380, -707, -727, -10, 727, 952, 997, -731, -133, 269, 287, 855, 716, -650, 479, 299, -839, -308, -782, 769, 545, 663, -536, -115, 904, -986, -258, -562, 582, 664, 408, -525, -889, 471, -370, -534, -220, 310, 766, 931, -193, -897, -192, -74, -365, -256, -359, -328, 658, -691, -431, 406, 699, 425, 713, -584, -45, -588, 289, 658, -290, -880, -987, -444, 371, 904, -155, 81, -278, -708, -189, -78, 655, 342, -998, -647, -734, -218, 726, 619, 663, 744, 518, 60, -409, 561, -727, -961, -306,\ +-147, -550, 240, -218, -393, 267, 724, 791, -548, 480, 180, -631, 825, -170, 107, 227, -691, 905, -909, 359, 227, 287, 909, 632, -89, -522, 80, -429, 37, 561, -732, -474, 565, -798, -460, 188, 507, -511, -654, 212, -314, -376, -997, -114, -708, 512, -848, 781, 126, -956, -298, 354, 
-400, -121, 510, 445, 926, 27, -708, 676, 248, 834, 542, 236, -105, -153, 102, 128, 96, -348, -626, 598, 8, 978, -589, -461, -38, 381, -232, -817, 467, 356, -151, -460, 429, -408, 425, 618, -611, -247, 819,\ +963, -160, 1000, 141, -647, -875, 108, 790, -127, 463, -37, -195, -542, 12, 845, -384, 770, -129, 315, 826, -942, 430, 146, -170, -583, -903, -489, 497, -559, -401, -29, -129, -411, 166, 942, -646, -862, -404, 785, 777, -111, -481, -738, 490, 741, -398, 846, -178, -509, -661, 748, 297, -658, -567, 531, 427, -201, -41, -808, -668, 782, -860, -324, 249, 835, -234, 116, 542, -201, 328, 675, 480, -906, 188, 445, 63, -525, 811, 277, 133, 779, -680, 950, -477, -306, -64, 552, -890, -956, 169,\ +442, 44, -169, -243, -242, 423, -884, -757, -403, 739, -350, 383, 429, 153, -702, -725, 51, 310, 857, -56, 538, 46, -311, 132, -620, -297, -124, 534, 884, -629, -117, 506, -837, -100, -27, -381, -735, 262, 843, 703, 260, -457, 834, 469, 9, 950, 59, 127, -820, 518, 64, -783, 659, -608, -676, 802, 30, 589, 246, -369, 361, 347, 534, -376, 68, 941, 709, 264, 384, 481, 628, 199, -568, -342, -337, 853, -804, -858, -169, -270, 641, -344, 112, 530, -773, -349, -135, -367, -350, -756, -911, 180,\ +-660, 116, -478, -265, -581, 510, 520, -986, 935, 219, 522, 744, 47, -145, 917, 638, 301, 296, 858, -721, 511, -816, 328, 473, 441, 697, -260, -673, -379, 893, 458, 154, 86, 905, 590, 231, -717, -179, 79, 272, -439, -192, 178, -200, 51, 717, -256, -358, -626, -518, -314, -825, -325, 588, 675, -892, -798, 448, -518, 603, -23, 668, -655, 845, -314, 783, -347, -496, 921, 893, -163, -748, -906, 11, -143, -64, 300, 336, 882, 646, 533, 676, -98, -148, -607, -952, -481, -959, -874, 764, 537,\ +736, -347, 646, -843, 966, -916, -718, -391, -648, 740, 755, 919, -608, 388, -655, 68, 201, 675, -855, 7, -503, 881, 760, 669, 831, 721, -564, -445, 217, 331, 970, 521, 486, -254, 25, -259, 336, -831, 252, -995, 908, -412, -240, 123, -478, 366, 264, -504, -843, 632, -288, 896, 301, 423, 185, 318, 380, 457, -450, -162, -313, 673, -963, 570, 433, -548, 107, -39, -142, -98, -884, -3, 599, -486, -926, 923, -82, 686, 290, 99, -382, -789, 16, 495, 570, 284, 474, -504, -201, -178, -1, 592, 52,\ +827, -540, -151, -991, 130, 353, -420, -467, -661, 417, -690, 942, 936, 814, -566, -251, -298, 341, -139, 786, 129, 525, -861, 680, 955, -245, -50, 331, 412, -38, -66, 611, -558, 392, -629, -471, -68, -535, 744, 495, 87, 558, 695, 260, -308, 215, -464, 239, -50, 193, -540, 184, -8, -194, 148, 898, -557, -21, 884, 644, -785, -689, -281, -737, 267, 50, 206, 292, 265, 380, -511, 310, 53, 375, -497, -40, 312, -606, -395, 142, 422, 662, -584, 72, 144, 40, -679, -593, 581, 689, -829, 442, 822,\ +977, -832, -134, -248, -207, 248, 29, 259, 189, 592, -834, -866, 102, 0, 340, 25, -354, -239, 420, -730, -992, -925, -314, 420, 914, 607, -296, -415, -30, 813, 866, 153, -90, 150, -81, 636, -392, -222, -835, 482, -631, -962, -413, -727, 280, 686, -382, 157, -404, -511, -432, 455, 58, 108, -408, 290, -829, -252, 113, 550, -935, 925, 422, 38, 789, 361, 487, -460, -769, -963, -285, 206, -799, -488, -233, 416, 143, -456, 753, 520, 599, 621, -168, 178, -841, 51, 952, 374, 166, -300, -576, 844,\ +-656, 90, 780, 371, 730, -896, -895, -386, -662, 467, -61, 130, -362, -675, -113, 135, -761, -55, 408, 822, 675, -347, 725, 114, 952, -510, -972, 390, -413, -277, -52, 315, -80, 401, -712, 147, -202, 84, 214, -178, 970, -571, -210, 525, -887, -863, 504, 192, 837, -594, 203, -876, -209, 305, -826, 377, 103, -928, -803, -956, 949, -868, -547, 824, -994, 516, 93, -524, -866, -890, 
-988, -501, 15, -6, 413, -825, 304, -818, -223, 525, 176, 610, 828, 391, 940, 540, -831, 650, 438, 589, 941, 57,\ +523, 126, 221, 860, -282, -262, -226, 764, 743, -640, 390, 384, -434, 608, -983, 566, -446, 618, 456, -176, -278, 215, 871, -180, 444, -931, -200, -781, 404, 881, 780, -782, 517, -739, -548, -811, 201, -95, -249, -228, 491, -299, 700, 964, -550, 108, 334, -653, 245, -293, -552, 350, -685, -415, -818, 216, -194, -255, 295, 249, 408, 351, 287, 379, 682, 231, -693, 902, -902, 574, 937, -708, -402, -460, 827, -268, 791, 343, -780, -150, -738, 920, -430, -88, -361, -588, -727, -47, -297, 662,\ +-840, -637, -635, 916, -857, 938, 132, -553, 391, -522, 640, 626, 690, 833, 867, -555, 577, 226, 686, -44, 0, -965, 651, -1, 909, 595, -646, 740, -821, -648, -962, 927, -193, 159, 490, 594, -189, 707, -884, 759, -278, -160, -566, -340, 19, 862, -440, 445, -598, 341, 664, -311, 309, -159, 19, -672, 705, -646, 976, 247, 686, -830, -27, -667, 81, 399, -423, -567, 945, 38, 51, 740, 621, 204, -199, -908, -593, 424, 250, -561, 695, 9, 520, 878, 120, -109, 42, -375, -635, -711, -687, 383, -278,\ +36, 970, 925, 864, 836, 309, 117, 89, 654, -387, 346, -53, 617, -164, -624, 184, -45, 852, 498, -513, 794, -682, -576, 13, -147, 285, -776, -886, -96, 483, 994, -188, 346, -629, -848, 738, 51, 128, -898, -753, -906, 270, -203, -577, 48, -243, -210, 666, 353, 636, -954, 862, 560, -944, -877, -137, 440, -945, -316, 274, -211, -435, 615, -635, -468, 744, 948, -589, 525, 757, -191, -431, 42, 451, -160, -827, -991, 324, 697, 342, -610, 894, -787, -384, 872, 734, 878, 70, -260, 57, 397, -518,\ +629, -510, -94, 207, 214, -625, 106, -882, -575, 908, -650, 723, -154, 45, 108, -69, -565, 927, -68, -351, 707, -282, 429, -889, -596, 848, 578, -492, 41, -822, -992, 168, -286, -780, 970, 597, -293, -12, 367, 708, -415, 194, -86, -390, 224, 69, -368, -674, 1000, -672, 356, -202, -169, 826, 476, -285, 29, -448, 545, 186, 319, 67, 705, 412, 225, -212, -351, -391, -783, -9, 875, -59, -159, -123, -151, -296, 871, -638, 359, 909, -945, 345, -16, -562, -363, -183, -625, -115, -571, -329, 514,\ +99, 263, 463, -39, 597, -652, -349, 246, 77, -127, -563, -879, -30, 756, 777, -865, 675, -813, -501, 871, -406, -627, 834, -609, -205, -812, 643, -204, 291, -251, -184, -584, -541, 410, -573, -600, 908, -871, -687, 296, -713, -139, -778, -790, 347, -52, -400, 407, -653, 670, 39, -856, 904, 433, 392, 590, -271, -144, -863, 443, 353, 468, -544, 486, -930, 458, -596, -890, 163, 822, 768, 980, -783, -792, 126, 386, 367, -264, 603, -61, 728, 160, -4, -837, 832, 591, 436, 518, 796, -622, -867,\ +-669, -947, 253, 100, -792, 841, 413, 833, -249, -550, 282, -825, 936, -348, 898, -451, -283, 818, -237, 630, 216, -499, -637, -511, 767, -396, 221, 958, -586, -920, 401, -313, -580, -145, -270, 118, 497, 426, -975, 480, -445, -150, -721, -929, 439, -893, 902, 960, -525, -793, 924, 563, 683, -727, -86, 309, 432, -762, -345, 371, -617, 149, -215, -228, 505, 593, -20, -292, 704, -999, 149, -104, 819, -414, -443, 517, -599, -5, 145, -24, -993, -283, 904, 174, -112, -276, -860, 44, -257,\ +-931, -821, -667, 540, 421, 485, 531, 407, 833, 431, -415, 878, 503, -901, 639, -608, 896, 860, 927, 424, 113, -808, -323, 729, 382, -922, 548, -791, -379, 207, 203, 559, 537, 137, 999, -913, -240, 942, 249, 616, 775, -4, 915, 855, -987, -234, -384, 948, -310, -542, 125, -289, -599, 967, -492, -349, -552, 562, -926, 632, -164, 217, -165, -496, 847, 684, -884, 457, -748, -745, -38, 93, 961, 934, 588, 366, -130, 851, -803, -811, -211, 428, 183, -469, 888, 596, -475, -899, 
-681, 508, 184,\ +921, 863, -610, -416, -119, -966, -686, 210, 733, 715, -889, -925, -434, -566, -455, 596, -514, 983, 755, -194, -802, -313, 91, -541, 808, -834, 243, -377, 256, 966, -402, -773, -308, -605, 266, 866, 118, -425, -531, 498, 666, 813, -267, 830, 69, -869, -496, 735, 28, 488, -645, -493, -689, 170, -940, 532, 844, -658, -617, 408, -200, 764, -665, 568, 342, 621, 908, 471, 280, 859, 709, 898, 81, -547, 406, 514, -595, 43, -824, -696, -746, -429, -59, -263, -813, 233, 279, -125, 687, -418,\ +-530, 409, 614, 803, -407, 78, -676, -39, -887, -141, -292, 270, -343, 400, 907, 588, 668, 899, 973, 103, -101, -11, 397, -16, 165, 705, -410, -585, 316, 391, -346, -336, 957, -118, -538, -441, -845, 121, 591, -359, -188, -362, -208, 27, -925, -157, -495, -177, -580, 9, 531, -752, 94, 107, 820, 769, -500, 852, 617, 145, 355, 34, -463, -265, -709, -111, -855, -405, 560, 470, 3, -177, -164, -249, 450, 662, 841, -689, -509, 987, -33, 769, 234, -2, 203, 780, 744, -895, 497, -432, -406, -264,\ +-71, 124, 778, -897, 495, 127, -76, 52, -768, 205, 464, -992, 801, -83, -806, 545, -316, 146, 772, 786, 289, -936, 145, -30, -722, -455, 270, 444, 427, -482, 383, -861, 36, 630, -404, 83, 864, 743, -351, -846, 315, -837, 357, -195, 450, -715, 227, -942, 740, -519, 476, 716, 713, 169, 492, -112, -49, -931, 866, 95, -725, 198, -50, -17, -660, 356, -142, -781, 53, 431, 720, 143, -416, 446, -497, 490, -96, 157, 239, 487, -337, -224, -445, 813, 92, -22, 603, 424, 952, -632, -367, 898, -927,\ +884, -277, -187, -777, 537, -575, -313, 347, -33, 800, 672, -919, -541, 5, -270, -94, -265, -793, -183, -761, -516, -608, -218, 57, -889, -912, 508, 93, -90, 34, 530, 201, 999, -37, -186, -62, -980, 239, 902, 983, -287, -634, 524, -772, 470, -961, 32, 162, 315, -411, 400, -235, -283, -787, -703, 869, 792, 543, -274, 239, 733, -439, 306, 349, 579, -200, -201, -824, 384, -246, 133, -508, 770, -102, 957, -825, 740, 748, -376, 183, -426, 46, 668, -886, -43, -174, 672, -419, 390, 927, 1000,\ +318, 886, 47, 908, -540, -825, -5, 314, -999, 354, -603, 966, -633, -689, 985, 534, -290, 167, -652, -797, -612, -79, 488, 622, -464, -950, 595, 897, 704, -238, -395, 125, 831, -180, 226, -379, 310, 564, 56, -978, 895, -61, 686, -251, 434, -417, 161, -512, 752, 528, -589, -425, 66, -925, -157, 1000, 96, 256, -239, -784, -882, -464, -909, 663, -177, -678, -441, 669, -564, -201, -121, -743, 187, -107, -768, -682, 355, 161, 411, 984, -954, 166, -842, -755, 267, -709, 372, -699, -272, -850,\ +403, -839, 949, 622, -62, 51, 917, 70, 528, -558, -632, 832, 276, 61, -445, -195, 960, 846, -474, 764, 879, -411, 948, -62, -592, -123, -96, -551, -555, -724, 849, 250, -808, -732, 797, -839, -554, 306, -919, 888, 484, -728, 152, -122, -287, 16, -345, -396, -268, -963, -500, 433, 343, 418, -480, 828, 594, 821, -9, 933, -230, 707, -847, -610, -748, -234, 688, 935, 713, 865, -743, 293, -143, -20, 928, -906, -762, 528, 722, 412, -70, 622, -245, 539, -686, 730, -866, -705, 28, -916, -623,\ +-768, -614, -915, -123, -183, 680, -223, 515, -37, -235, -5, 260, 347, -239, -322, -861, -848, -936, 945, 721, -580, -639, 780, -153, -26, 685, 177, 587, 307, -915, 435, 658, 539, -229, -719, -171, -858, 162, 734, -539, -437, 246, 639, 765, -477, -342, -209, -284, -779, -414, -452, 914, 338, -83, 759, 567, 266, -485, 14, 225, 347, -432, -242, 997, -365, -764, 119, -641, -416, -388, -436, -388, -54, -649, -571, -920, -477, 714, -363, 836, 369, 702, 869, 503, -287, -679, 46, -666, -202,\ +-602, 71, -259, 967, 601, -571, -830, -993, -271, 281, -494, 482, -180, 572, 587, 
-651, -566, -448, -228, 511, -924, 832, -52, -712, 402, -644, -533, -865, 269, 965, 56, 675, 179, -338, -272, 614, 602, -283, 303, -70, 909, -942, 117, 839, 468, 813, -765, 884, -697, -813, 352, 374, -705, -295, 633, 211, -754, 597, -941, -142, -393, -469, -653, 688, 996, 911, 214, 431, 453, -141, 874, -81, -258, -735, -3, -110, -338, -929, -182, -306, -104, -840, -588, -759, -157, -801, 848, -698, 627, 914,\ +-33, -353, 425, 150, -798, 553, 934, -778, -196, -132, 808, 745, -894, 144, 213, 662, 273, -79, 454, -60, -467, 48, -15, -807, 69, -930, 749, 559, -867, -103, 258, -677, 750, -303, 846, -227, -936, 744, -770, 770, -434, 594, -477, 589, -612, 535, 357, -623, 683, 369, 905, 980, -410, -663, 762, -888, -563, -845, 843, 353, -491, 996, -255, -336, -132, 695, -823, 289, -143, 365, 916, 877, 245, -530, -848, -804, -118, -108, 847, 620, -355, 499, 881, 92, -640, 542, 38, 626, -260, -34, -378,\ +598, 890, 305, -118, 711, -385, 600, -570, 27, -129, -893, 354, 459, 374, 816, 470, 356, 661, 877, 735, -286, -780, 620, 943, -169, -888, 978, 441, -667, -399, 662, 249, 137, 598, -863, -453, 722, -815, -251, -995, -294, -707, 901, 763, 977, 137, 431, -994, 905, 593, 694, 444, -626, -816, 252, 282, 616, 841, 360, -932, 817, -908, 50, 394, -120, -786, -338, 499, -982, -95, -454, 838, -312, 320, -127, -653, 53, 16, 988, -968, -151, -369, -836, 293, -271, 483, 18, 724, -204, -965, 245, 310,\ +987, 552, -835, -912, -861, 254, 560, 124, 145, 798, 178, 476, 138, -311, 151, -907, -886, -592, 728, -43, -489, 873, -422, -439, -489, 375, -703, -459, 338, 418, -25, 332, -454, 730, -604, -800, 37, -172, -197, -568, -563, -332, 228, -182, 994, -123, 444, -567, 98, 78, 0, -504, -150, 88, -936, 199, -651, -776, 192, 46, 526, -727, -991, 534, -659, -738, 256, -894, 965, -76, 816, 435, -418, 800, 838, 67, -733, 570, 112, -514, -416\r\ +"; + thread_local! 
{ + static TEST_TERM_AND_POINT: (Term, AlacPoint) = + build_test_term(&LINE, 5, 50); + } + TEST_TERM_AND_POINT.with(|(term, point)| { + assert_eq!( + find_from_grid_point_bench(term, *point) + .map(|(path, ..)| path) + .unwrap_or_default(), + "392", + "Hyperlink should have been found" + ); + }); + } + + #[perf] + // https://github.com/zed-industries/zed/issues/44510 + pub fn issue_44510_hyperlink_benchmark() { + const LINE: &str = "..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ 
+..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ 
+..............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................\ +...............................................E.\r\ +"; + thread_local! { + static TEST_TERM_AND_POINT: (Term, AlacPoint) = + build_test_term(&LINE, 5, 50); + } + TEST_TERM_AND_POINT.with(|(term, point)| { + assert_eq!( + find_from_grid_point_bench(term, *point) + .map(|(path, ..)| path) + .unwrap_or_default(), + LINE.trim_end_matches(['.', '\r', '\n']), + "Hyperlink should have been found" + ); + }); + } + + pub fn find_from_grid_point_bench( + term: &Term, + point: AlacPoint, + ) -> Option<(String, bool, Match)> { + const PATH_HYPERLINK_TIMEOUT_MS: u64 = 1000; + + thread_local! { + static TEST_REGEX_SEARCHES: RefCell = + RefCell::new({ + let default_settings_content: Rc = + settings::parse_json_with_comments(&settings::default_settings()) + .unwrap(); + let default_terminal_settings = + TerminalSettings::from_settings(&default_settings_content); + + RegexSearches::new( + &default_terminal_settings.path_hyperlink_regexes, + PATH_HYPERLINK_TIMEOUT_MS + ) + }); + } + + TEST_REGEX_SEARCHES.with(|regex_searches| { + find_from_grid_point(&term, point, &mut regex_searches.borrow_mut()) + }) + } + } } mod file_iri { - // File IRIs have a ton of use cases, most of which we currently do not support. A few of - // those cases are documented here as tests which are expected to fail. + // File IRIs have a ton of use cases. Absolute file URIs are supported on all platforms, + // including Windows drive letters (e.g., file:///C:/path) and percent-encoded characters. + // Some cases like relative file IRIs are not supported. 
// See https://en.wikipedia.org/wiki/File_URI_scheme /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns** @@ -794,7 +1246,6 @@ mod tests { mod issues { #[cfg(not(target_os = "windows"))] #[test] - #[should_panic(expected = "Path = «/test/Ῥόδος/», at grid cells (0, 0)..=(15, 1)")] fn issue_file_iri_with_percent_encoded_characters() { // Non-space characters // file:///test/Ῥόδος/ @@ -821,19 +1272,14 @@ mod tests { } // See https://en.wikipedia.org/wiki/File_URI_scheme + // https://github.com/zed-industries/zed/issues/39189 #[test] - #[should_panic( - expected = r#"Path = «C:\\test\\cool\\index.rs», at grid cells (0, 0)..=(9, 1)"# - )] - fn issue_absolute_file_iri() { + fn issue_39189() { test_file_iri!("file:///C:/test/cool/index.rs"); test_file_iri!("file:///C:/test/cool/"); } #[test] - #[should_panic( - expected = r#"Path = «C:\\test\\Ῥόδος\\», at grid cells (0, 0)..=(16, 1)"# - )] fn issue_file_iri_with_percent_encoded_characters() { // Non-space characters // file:///test/Ῥόδος/ @@ -981,7 +1427,7 @@ mod tests { let mut point = cursor.point; if !cursor.input_needs_wrap { - point.column -= 1; + point = point.sub(term, Boundary::Grid, 1); } if grid.index(point).flags.contains(Flags::WIDE_CHAR_SPACER) { @@ -1007,6 +1453,13 @@ mod tests { } } + fn process_input(term: &mut Term, c: char) { + match c { + '\t' => term.put_tab(1), + c @ _ => term.input(c), + } + } + let mut hovered_grid_point: Option = None; let mut hyperlink_match = AlacPoint::default()..=AlacPoint::default(); let mut iri_or_path = String::default(); @@ -1098,9 +1551,9 @@ mod tests { term.input('C'); prev_input_point = prev_input_point_from_term(&term); term.input(':'); - term.input(c); + process_input(&mut term, c); } else { - term.input(c); + process_input(&mut term, c); prev_input_point = prev_input_point_from_term(&term); } @@ -1130,15 +1583,6 @@ mod tests { iri_or_path = path.to_string_lossy().into_owned(); } - if cfg!(windows) { - // Handle verbatim and UNC paths for Windows - if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\UNC\"#) { - iri_or_path = format!(r#"\\{stripped}"#); - } else if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\"#) { - iri_or_path = stripped.to_string(); - } - } - let hovered_grid_point = hovered_grid_point.expect("Missing hovered point (👉 or 👈)"); let hovered_char = term.grid().index(hovered_grid_point).c; ( @@ -1161,6 +1605,7 @@ mod tests { match c { // Fullwidth unicode characters used in tests '例' | '🏃' | '🦀' | '🔥' => 2, + '\t' => 8, // it's really 0-8, use the max always _ => 1, } } @@ -1283,11 +1728,9 @@ mod tests { let mut marker_header_row = String::new(); for index in 0..self.term.columns() { let remainder = index % 10; - first_header_row.push_str( - &(index > 0 && remainder == 0) - .then_some((index / 10).to_string()) - .unwrap_or(" ".into()), - ); + if index > 0 && remainder == 0 { + first_header_row.push_str(&format!("{:>10}", (index / 10))); + } second_header_row += &remainder.to_string(); if index == self.expected_hyperlink.hovered_grid_point.column.0 { marker_header_row.push('↓'); @@ -1296,16 +1739,20 @@ mod tests { } } - result += &format!("\n [{}]\n", first_header_row); + let remainder = (self.term.columns() - 1) % 10; + if remainder != 0 { + first_header_row.push_str(&" ".repeat(remainder)); + } + + result += &format!("\n [ {}]\n", first_header_row); result += &format!(" [{}]\n", second_header_row); result += &format!(" {}", marker_header_row); - let spacers: Flags = Flags::LEADING_WIDE_CHAR_SPACER | Flags::WIDE_CHAR_SPACER; 
for cell in self .term .renderable_content() .display_iter - .filter(|cell| !cell.flags.intersects(spacers)) + .filter(|cell| !cell.flags.intersects(WIDE_CHAR_SPACERS)) { if cell.point.column.0 == 0 { let prefix = @@ -1317,7 +1764,10 @@ mod tests { result += &format!("\n{prefix}[{:>3}] ", cell.point.line.to_string()); } - result.push(cell.c); + match cell.c { + '\t' => result.push(' '), + c @ _ => result.push(c), + } } result @@ -1331,8 +1781,34 @@ mod tests { hyperlink_kind: HyperlinkKind, source_location: &str, ) { + const CARGO_DIR_REGEX: &str = + r#"\s+(Compiling|Checking|Documenting) [^(]+\((?(?.+))\)"#; + const RUST_DIAGNOSTIC_REGEX: &str = r#"\s+(-->|:::|at) (?(?.+?))(:$|$)"#; + const ISSUE_12338_REGEX: &str = + r#"[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2} (?(?.+))"#; + const MULTIPLE_SAME_LINE_REGEX: &str = + r#"(?(?🦀 multiple_same_line 🦀) 🚣(?[0-9]+) 🏛(?[0-9]+)):"#; + const PATH_HYPERLINK_TIMEOUT_MS: u64 = 1000; + thread_local! { - static TEST_REGEX_SEARCHES: RefCell = RefCell::new(RegexSearches::new()); + static TEST_REGEX_SEARCHES: RefCell = + RefCell::new({ + let default_settings_content: Rc = + settings::parse_json_with_comments(&settings::default_settings()).unwrap(); + let default_terminal_settings = TerminalSettings::from_settings(&default_settings_content); + + RegexSearches::new([ + RUST_DIAGNOSTIC_REGEX, + CARGO_DIR_REGEX, + ISSUE_12338_REGEX, + MULTIPLE_SAME_LINE_REGEX, + ] + .into_iter() + .chain(default_terminal_settings.path_hyperlink_regexes + .iter() + .map(AsRef::as_ref)), + PATH_HYPERLINK_TIMEOUT_MS) + }); } let term_size = TermSize::new(columns, total_cells / columns + 2); @@ -1357,12 +1833,16 @@ mod tests { Some((hyperlink_word, true, hyperlink_match)) => { check_hyperlink_match.check_iri_and_match(hyperlink_word, &hyperlink_match); } - _ => { - assert!( - false, - "No hyperlink found\n at {source_location}:\n{}", - check_hyperlink_match.format_renderable_content() - ) + None => { + if expected_hyperlink.hyperlink_match.start() + != expected_hyperlink.hyperlink_match.end() + { + assert!( + false, + "No hyperlink found\n at {source_location}:\n{}", + check_hyperlink_match.format_renderable_content() + ) + } } } } diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 01def426d887309f657efeef1172facec3e16b42..3d70d85f35239778bee61113ebc51eea7d87adcb 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -7,9 +7,10 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; pub use settings::AlternateScroll; + use settings::{ - RegisterSetting, ShowScrollbar, TerminalBlink, TerminalDockPosition, TerminalLineHeight, - VenvSettings, WorkingDirectory, merge_from::MergeFrom, + PathHyperlinkRegex, RegisterSetting, ShowScrollbar, TerminalBlink, TerminalDockPosition, + TerminalLineHeight, VenvSettings, WorkingDirectory, merge_from::MergeFrom, }; use task::Shell; use theme::FontFamilyName; @@ -42,9 +43,12 @@ pub struct TerminalSettings { pub default_height: Pixels, pub detect_venv: VenvSettings, pub max_scroll_history_lines: Option, + pub scroll_multiplier: f32, pub toolbar: Toolbar, pub scrollbar: ScrollbarSettings, pub minimum_contrast: f32, + pub path_hyperlink_regexes: Vec, + pub path_hyperlink_timeout_ms: u64, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -91,7 +95,7 @@ impl settings::Settings for TerminalSettings { ) }), font_features: user_content.font_features, - font_weight: user_content.font_weight.map(FontWeight), + 
font_weight: user_content.font_weight, line_height: user_content.line_height.unwrap(), env: project_content.env.unwrap(), cursor_shape: user_content.cursor_shape.unwrap().into(), @@ -105,6 +109,7 @@ impl settings::Settings for TerminalSettings { default_width: px(user_content.default_width.unwrap()), default_height: px(user_content.default_height.unwrap()), detect_venv: project_content.detect_venv.unwrap(), + scroll_multiplier: user_content.scroll_multiplier.unwrap(), max_scroll_history_lines: user_content.max_scroll_history_lines, toolbar: Toolbar { breadcrumbs: user_content.toolbar.unwrap().breadcrumbs.unwrap(), @@ -113,6 +118,16 @@ impl settings::Settings for TerminalSettings { show: user_content.scrollbar.unwrap().show, }, minimum_contrast: user_content.minimum_contrast.unwrap(), + path_hyperlink_regexes: project_content + .path_hyperlink_regexes + .unwrap() + .into_iter() + .map(|regex| match regex { + PathHyperlinkRegex::SingleLine(regex) => regex, + PathHyperlinkRegex::MultiLine(regex) => regex.join("\n"), + }) + .collect(), + path_hyperlink_timeout_ms: project_content.path_hyperlink_timeout_ms.unwrap(), } } } diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index 1800562e2fd262d040ef957b402cc650681956a5..eadd00bcbbd7a5469638c2b85d2eb4f1a65b9475 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -39,7 +39,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true shellexpand.workspace = true -smol.workspace = true terminal.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 1ed8f904d2ca13811dbbdea29a1ed2a6e1cc6275..b5324b7c6c7e0c467c657b122717fbf17cf9f7b9 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -632,7 +632,7 @@ impl TerminalElement { ) -> impl Fn(&E, &mut Window, &mut App) { move |event, window, cx| { if steal_focus { - window.focus(&focus_handle); + window.focus(&focus_handle, cx); } else if !focus_handle.is_focused(window) { return; } @@ -661,7 +661,7 @@ impl TerminalElement { let terminal_view = terminal_view.clone(); move |e, window, cx| { - window.focus(&focus); + window.focus(&focus, cx); let scroll_top = terminal_view.read(cx).scroll_top; terminal.update(cx, |terminal, cx| { @@ -1275,7 +1275,7 @@ impl Element for TerminalElement { } for (relative_highlighted_range, color) in - layout.relative_highlighted_ranges.iter() +& layout.relative_highlighted_ranges { if let Some((start_y, highlighted_range_lines)) = to_highlighted_range_lines(relative_highlighted_range, layout, origin) @@ -1542,11 +1542,13 @@ fn to_highlighted_range_lines( } let clamped_start_line = unclamped_start.line.0.max(0) as usize; + let clamped_end_line = unclamped_end .line .0 .min(layout.dimensions.num_lines() as i32) as usize; - //Convert the start of the range to pixels + + // Convert the start of the range to pixels let start_y = origin.y + clamped_start_line as f32 * layout.dimensions.line_height; // Step 3. Expand ranges that cross lines into a collection of single-line ranges. 
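For reference, the line-range clamping shown in the `to_highlighted_range_lines` hunk above, together with the guards added in the next hunk, can be modeled as a standalone function. This is a hedged sketch only: `num_lines`, `columns`, and the `(line, column)` tuples are simplified stand-ins for the crate's layout dimensions and Point types, not its real API.

```rust
// Standalone model of the highlighted-range clamping: a selection may start above
// the visible grid or end below it; every fully covered line gets a full-width span,
// and only on-grid endpoints narrow their line.
fn expand_highlighted_range(
    (start_line, start_col): (i32, usize),
    (end_line, end_col): (i32, usize),
    num_lines: i32,
    columns: usize,
) -> Vec<(usize, usize)> {
    if end_line < 0 || start_line > num_lines {
        // The real function bails out earlier for ranges that are entirely off-screen.
        return Vec::new();
    }
    let clamped_start_line = start_line.max(0) as usize;
    let clamped_end_line = end_line.min(num_lines) as usize;

    (clamped_start_line..=clamped_end_line)
        .map(|line| {
            let mut line_start = 0;
            let mut line_end = columns;
            // Only trust an endpoint's column if that endpoint is actually on the grid;
            // a clamped endpoint means the selection covers this line edge to edge.
            if line == clamped_start_line && start_line >= 0 {
                line_start = start_col;
            }
            if line == clamped_end_line && end_line <= num_lines {
                line_end = end_col + 1; // +1 for inclusive end
            }
            (line_start, line_end)
        })
        .collect()
}

fn main() {
    // A selection starting above the viewport and ending at (line 2, column 4)
    // on a 24-line, 80-column grid:
    assert_eq!(
        expand_highlighted_range((-3, 7), (2, 4), 24, 80),
        vec![(0, 80), (0, 80), (0, 5)]
    );
}
```

The extra `start_line >= 0` / `end_line <= num_lines` checks (added in the hunk below) are what keep a clamped endpoint from incorrectly narrowing a line that the selection actually covers end to end.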
@@ -1556,10 +1558,11 @@ fn to_highlighted_range_lines( let mut line_start = 0; let mut line_end = layout.dimensions.columns(); - if line == clamped_start_line { + if line == clamped_start_line && unclamped_start.line.0 >= 0 { line_start = unclamped_start.column.0; } - if line == clamped_end_line { + if line == clamped_end_line && unclamped_end.line.0 <= layout.dimensions.num_lines() as i32 + { line_end = unclamped_end.column.0 + 1; // +1 for inclusive } diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 941689514d806acffb54fc0bfb8ecad86c1d2e70..85c6b81f406597e097cabc27408d3df70aad6395 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -167,7 +167,7 @@ impl TerminalPanel { // hence we focus that first. Otherwise, we'd end up without a focused element, as // context menu will be gone the moment we spawn the modal. .action( - "Spawn task", + "Spawn Task", zed_actions::Spawn::modal().boxed_clone(), ) }); @@ -342,7 +342,7 @@ impl TerminalPanel { pane::Event::RemovedItem { .. } => self.serialize(cx), pane::Event::Remove { focus_on_pane } => { let pane_count_before_removal = self.center.panes().len(); - let _removal_result = self.center.remove(pane); + let _removal_result = self.center.remove(pane, cx); if pane_count_before_removal == 1 { self.center.first_pane().update(cx, |pane, cx| { pane.set_zoomed(false, cx); @@ -351,7 +351,7 @@ impl TerminalPanel { } else if let Some(focus_on_pane) = focus_on_pane.as_ref().or_else(|| self.center.panes().pop()) { - focus_on_pane.focus_handle(cx).focus(window); + focus_on_pane.focus_handle(cx).focus(window, cx); } } pane::Event::ZoomIn => { @@ -393,8 +393,11 @@ impl TerminalPanel { }; panel .update_in(cx, |panel, window, cx| { - panel.center.split(&pane, &new_pane, direction).log_err(); - window.focus(&new_pane.focus_handle(cx)); + panel + .center + .split(&pane, &new_pane, direction, cx) + .log_err(); + window.focus(&new_pane.focus_handle(cx), cx); }) .ok(); }) @@ -415,8 +418,8 @@ impl TerminalPanel { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx); }); - self.center.split(&pane, &new_pane, direction).log_err(); - window.focus(&new_pane.focus_handle(cx)); + self.center.split(&pane, &new_pane, direction, cx).log_err(); + window.focus(&new_pane.focus_handle(cx), cx); } } pane::Event::Focus => { @@ -550,7 +553,7 @@ impl TerminalPanel { let builder = ShellBuilder::new(&shell, is_windows); let command_label = builder.command_label(task.command.as_deref().unwrap_or("")); - let (command, args) = builder.build(task.command.clone(), &task.args); + let (command, args) = builder.build_no_quote(task.command.clone(), &task.args); let task = SpawnInTerminal { command_label, @@ -995,7 +998,7 @@ impl TerminalPanel { RevealStrategy::NoFocus => match reveal_target { RevealTarget::Center => { task_workspace.update_in(cx, |workspace, window, cx| { - workspace.active_pane().focus_handle(cx).focus(window); + workspace.active_pane().focus_handle(cx).focus(window, cx); })?; } RevealTarget::Dock => { @@ -1050,7 +1053,7 @@ impl TerminalPanel { .center .find_pane_in_direction(&self.active_pane, direction, cx) { - window.focus(&pane.focus_handle(cx)); + window.focus(&pane.focus_handle(cx), cx); } else { self.workspace .update(cx, |workspace, cx| { @@ -1066,7 +1069,7 @@ impl TerminalPanel { .find_pane_in_direction(&self.active_pane, direction, cx) .cloned() { - self.center.swap(&self.active_pane, &to); + self.center.swap(&self.active_pane, 
&to, cx); cx.notify(); } } @@ -1074,7 +1077,7 @@ impl TerminalPanel { fn move_pane_to_border(&mut self, direction: SplitDirection, cx: &mut Context) { if self .center - .move_to_border(&self.active_pane, direction) + .move_to_border(&self.active_pane, direction, cx) .unwrap() { cx.notify(); @@ -1189,6 +1192,7 @@ pub fn new_terminal_pane( &this_pane, &new_pane, split_direction, + cx, )?; anyhow::Ok(new_pane) }) @@ -1293,7 +1297,7 @@ fn add_paths_to_terminal( .active_item() .and_then(|item| item.downcast::()) { - window.focus(&terminal_view.focus_handle(cx)); + window.focus(&terminal_view.focus_handle(cx), cx); let mut new_text = paths.iter().map(|path| format!(" {path:?}")).join(""); new_text.push(' '); terminal_view.update(cx, |terminal_view, cx| { @@ -1447,7 +1451,7 @@ impl Render for TerminalPanel { .position(|pane| **pane == terminal_panel.active_pane) { let next_ix = (ix + 1) % panes.len(); - window.focus(&panes[next_ix].focus_handle(cx)); + window.focus(&panes[next_ix].focus_handle(cx), cx); } }), ) @@ -1459,7 +1463,7 @@ impl Render for TerminalPanel { .position(|pane| **pane == terminal_panel.active_pane) { let prev_ix = cmp::min(ix.wrapping_sub(1), panes.len() - 1); - window.focus(&panes[prev_ix].focus_handle(cx)); + window.focus(&panes[prev_ix].focus_handle(cx), cx); } }, )) @@ -1467,7 +1471,7 @@ impl Render for TerminalPanel { cx.listener(|terminal_panel, action: &ActivatePane, window, cx| { let panes = terminal_panel.center.panes(); if let Some(&pane) = panes.get(action.0) { - window.focus(&pane.read(cx).focus_handle(cx)); + window.focus(&pane.read(cx).focus_handle(cx), cx); } else { let future = terminal_panel.new_pane_with_cloned_active_terminal(window, cx); @@ -1482,10 +1486,11 @@ impl Render for TerminalPanel { &terminal_panel.active_pane, &new_pane, SplitDirection::Right, + cx, ) .log_err(); let new_pane = new_pane.read(cx); - window.focus(&new_pane.focus_handle(cx)); + window.focus(&new_pane.focus_handle(cx), cx); }, ); } diff --git a/crates/terminal_view/src/terminal_tab_tooltip.rs b/crates/terminal_view/src/terminal_tab_tooltip.rs deleted file mode 100644 index 6324c0999a8231bb1e633ef39343944783029895..0000000000000000000000000000000000000000 --- a/crates/terminal_view/src/terminal_tab_tooltip.rs +++ /dev/null @@ -1,36 +0,0 @@ -use gpui::{IntoElement, Render}; -use ui::{Divider, prelude::*, tooltip_container}; - -pub struct TerminalTooltip { - title: SharedString, - pid: u32, -} - -impl TerminalTooltip { - pub fn new(title: impl Into, pid: u32) -> Self { - Self { - title: title.into(), - pid, - } - } -} - -impl Render for TerminalTooltip { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - tooltip_container(cx, move |this, _cx| { - this.occlude() - .on_mouse_move(|_, _window, cx| cx.stop_propagation()) - .child( - v_flex() - .gap_1() - .child(Label::new(self.title.clone())) - .child(Divider::horizontal()) - .child( - Label::new(format!("Process ID (PID): {}", self.pid)) - .color(Color::Muted) - .size(LabelSize::Small), - ), - ) - }) - } -} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 2bdce16125e3a70eaa94779cf0297c62e87f9cac..e7e60ff4b31dfbdd16b7de8841285d81fc311fc5 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -4,13 +4,12 @@ pub mod terminal_panel; mod terminal_path_like_target; pub mod terminal_scrollbar; mod terminal_slash_command; -pub mod terminal_tab_tooltip; use assistant_slash_command::SlashCommandRegistry; -use 
editor::{EditorSettings, actions::SelectAll}; +use editor::{EditorSettings, actions::SelectAll, blink_manager::BlinkManager}; use gpui::{ - Action, AnyElement, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, - KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Render, + Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, FocusHandle, + Focusable, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Render, ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, anchored, deferred, div, }; use persistence::TERMINAL_DB; @@ -32,9 +31,8 @@ use terminal_panel::TerminalPanel; use terminal_path_like_target::{hover_path_like_target, open_path_like_target}; use terminal_scrollbar::TerminalScrollHandle; use terminal_slash_command::TerminalSlashCommand; -use terminal_tab_tooltip::TerminalTooltip; use ui::{ - ContextMenu, Icon, IconName, Label, ScrollAxes, Scrollbars, Tooltip, WithScrollbar, h_flex, + ContextMenu, Divider, ScrollAxes, Scrollbars, Tooltip, WithScrollbar, prelude::*, scrollbars::{self, GlobalSetting, ScrollbarVisibility}, }; @@ -51,7 +49,6 @@ use workspace::{ use serde::Deserialize; use settings::{Settings, SettingsStore, TerminalBlink, WorkingDirectory}; -use smol::Timer; use zed_actions::assistant::InlineAssist; use std::{ @@ -126,12 +123,10 @@ pub struct TerminalView { has_bell: bool, context_menu: Option<(Entity, gpui::Point, Subscription)>, cursor_shape: CursorShape, - blink_state: bool, + blink_manager: Entity, mode: TerminalMode, blinking_terminal_enabled: bool, cwd_serialized: bool, - blinking_paused: bool, - blink_epoch: usize, hover: Option, hover_tooltip_update: Task<()>, workspace_id: Option, @@ -237,6 +232,25 @@ impl TerminalView { let scroll_handle = TerminalScrollHandle::new(terminal.read(cx)); + let blink_manager = cx.new(|cx| { + BlinkManager::new( + CURSOR_BLINK_INTERVAL, + |cx| { + !matches!( + TerminalSettings::get_global(cx).blinking, + TerminalBlink::Off + ) + }, + cx, + ) + }); + + let _subscriptions = vec![ + focus_in, + focus_out, + cx.observe(&blink_manager, |_, _, cx| cx.notify()), + cx.observe_global::(Self::settings_changed), + ]; Self { terminal, workspace: workspace_handle, @@ -245,10 +259,8 @@ impl TerminalView { focus_handle, context_menu: None, cursor_shape, - blink_state: true, + blink_manager, blinking_terminal_enabled: false, - blinking_paused: false, - blink_epoch: 0, hover: None, hover_tooltip_update: Task::ready(()), mode: TerminalMode::Standalone, @@ -259,11 +271,7 @@ impl TerminalView { scroll_handle, cwd_serialized: false, ime_state: None, - _subscriptions: vec![ - focus_in, - focus_out, - cx.observe_global::(Self::settings_changed), - ], + _subscriptions, _terminal_subscriptions: terminal_subscriptions, } } @@ -401,7 +409,7 @@ impl TerminalView { ) }); - window.focus(&context_menu.focus_handle(cx)); + window.focus(&context_menu.focus_handle(cx), cx); let subscription = cx.subscribe_in( &context_menu, window, @@ -424,6 +432,11 @@ impl TerminalView { let breadcrumb_visibility_changed = self.show_breadcrumbs != settings.toolbar.breadcrumbs; self.show_breadcrumbs = settings.toolbar.breadcrumbs; + let should_blink = match settings.blinking { + TerminalBlink::Off => false, + TerminalBlink::On => true, + TerminalBlink::TerminalControlled => self.blinking_terminal_enabled, + }; let new_cursor_shape = settings.cursor_shape; let old_cursor_shape = self.cursor_shape; if old_cursor_shape != new_cursor_shape { @@ -433,6 +446,15 @@ impl TerminalView { }); } + 
self.blink_manager.update( + cx, + if should_blink { + BlinkManager::enable + } else { + BlinkManager::disable + }, + ); + if breadcrumb_visibility_changed { cx.emit(ItemEvent::UpdateBreadcrumbs); } @@ -519,7 +541,12 @@ impl TerminalView { return; } } - self.terminal.update(cx, |term, _| term.scroll_wheel(event)); + self.terminal.update(cx, |term, cx| { + term.scroll_wheel( + event, + TerminalSettings::get_global(cx).scroll_multiplier.max(0.01), + ) + }); } fn scroll_line_up(&mut self, _: &ScrollLineUp, _: &mut Window, cx: &mut Context) { @@ -605,9 +632,8 @@ impl TerminalView { } pub fn should_show_cursor(&self, focused: bool, cx: &mut Context) -> bool { - //Don't blink the cursor when not focused, blinking is disabled, or paused + // Always show cursor when not focused or in special modes if !focused - || self.blinking_paused || self .terminal .read(cx) @@ -618,45 +644,18 @@ impl TerminalView { return true; } + // When focused, check blinking settings and blink manager state match TerminalSettings::get_global(cx).blinking { - //If the user requested to never blink, don't blink it. TerminalBlink::Off => true, - //If the terminal is controlling it, check terminal mode TerminalBlink::TerminalControlled => { - !self.blinking_terminal_enabled || self.blink_state + !self.blinking_terminal_enabled || self.blink_manager.read(cx).visible() } - TerminalBlink::On => self.blink_state, + TerminalBlink::On => self.blink_manager.read(cx).visible(), } } - fn blink_cursors(&mut self, epoch: usize, window: &mut Window, cx: &mut Context) { - if epoch == self.blink_epoch && !self.blinking_paused { - self.blink_state = !self.blink_state; - cx.notify(); - - let epoch = self.next_blink_epoch(); - cx.spawn_in(window, async move |this, cx| { - Timer::after(CURSOR_BLINK_INTERVAL).await; - this.update_in(cx, |this, window, cx| this.blink_cursors(epoch, window, cx)) - .ok(); - }) - .detach(); - } - } - - pub fn pause_cursor_blinking(&mut self, window: &mut Window, cx: &mut Context) { - self.blink_state = true; - cx.notify(); - - let epoch = self.next_blink_epoch(); - cx.spawn_in(window, async move |this, cx| { - Timer::after(CURSOR_BLINK_INTERVAL).await; - this.update_in(cx, |this, window, cx| { - this.resume_cursor_blinking(epoch, window, cx) - }) - .ok(); - }) - .detach(); + pub fn pause_cursor_blinking(&mut self, _window: &mut Window, cx: &mut Context) { + self.blink_manager.update(cx, BlinkManager::pause_blinking); } pub fn terminal(&self) -> &Entity { @@ -680,23 +679,6 @@ impl TerminalView { cx.notify(); } - fn next_blink_epoch(&mut self) -> usize { - self.blink_epoch += 1; - self.blink_epoch - } - - fn resume_cursor_blinking( - &mut self, - epoch: usize, - window: &mut Window, - cx: &mut Context, - ) { - if epoch == self.blink_epoch { - self.blinking_paused = false; - self.blink_cursors(epoch, window, cx); - } - } - ///Attempt to paste the clipboard into the terminal fn copy(&mut self, _: &Copy, _: &mut Window, cx: &mut Context) { self.terminal.update(cx, |term, _| term.copy(None)); @@ -705,12 +687,32 @@ impl TerminalView { ///Attempt to paste the clipboard into the terminal fn paste(&mut self, _: &Paste, _: &mut Window, cx: &mut Context) { - if let Some(clipboard_string) = cx.read_from_clipboard().and_then(|item| item.text()) { + let Some(clipboard) = cx.read_from_clipboard() else { + return; + }; + + if clipboard.entries().iter().any(|entry| match entry { + ClipboardEntry::Image(image) => !image.bytes.is_empty(), + _ => false, + }) { + self.forward_ctrl_v(cx); + return; + } + + if let Some(text) = 
clipboard.text() { self.terminal - .update(cx, |terminal, _cx| terminal.paste(&clipboard_string)); + .update(cx, |terminal, _cx| terminal.paste(&text)); } } + /// Emits a raw Ctrl+V so TUI agents can read the OS clipboard directly + /// and attach images using their native workflows. + fn forward_ctrl_v(&self, cx: &mut Context) { + self.terminal.update(cx, |term, _| { + term.input(vec![0x16]); + }); + } + fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context) { self.clear_bell(cx); self.terminal.update(cx, |term, _| { @@ -888,11 +890,21 @@ fn subscribe_for_terminal_events( } Event::BlinkChanged(blinking) => { + terminal_view.blinking_terminal_enabled = *blinking; + + // If in terminal-controlled mode and focused, update blink manager if matches!( TerminalSettings::get_global(cx).blinking, TerminalBlink::TerminalControlled - ) { - terminal_view.blinking_terminal_enabled = *blinking; + ) && terminal_view.focus_handle.is_focused(window) + { + terminal_view.blink_manager.update(cx, |manager, cx| { + if *blinking { + manager.enable(cx); + } else { + manager.disable(cx); + } + }); } } @@ -1018,12 +1030,23 @@ impl TerminalView { terminal.set_cursor_shape(self.cursor_shape); terminal.focus_in(); }); - self.blink_cursors(self.blink_epoch, window, cx); + + let should_blink = match TerminalSettings::get_global(cx).blinking { + TerminalBlink::Off => false, + TerminalBlink::On => true, + TerminalBlink::TerminalControlled => self.blinking_terminal_enabled, + }; + + if should_blink { + self.blink_manager.update(cx, BlinkManager::enable); + } + window.invalidate_character_coordinates(); cx.notify(); } - fn focus_out(&mut self, _: &mut Window, cx: &mut Context) { + fn focus_out(&mut self, _window: &mut Window, cx: &mut Context) { + self.blink_manager.update(cx, BlinkManager::disable); self.terminal.update(cx, |terminal, _| { terminal.focus_out(); terminal.set_cursor_shape(CursorShape::Hollow); @@ -1113,7 +1136,7 @@ impl Render for TerminalView { ScrollAxes::Vertical, cx.theme().colors().editor_background, ) - .tracked_scroll_handle(self.scroll_handle.clone()), + .tracked_scroll_handle(&self.scroll_handle), window, cx, ) @@ -1135,14 +1158,24 @@ impl Item for TerminalView { type Event = ItemEvent; fn tab_tooltip_content(&self, cx: &App) -> Option { - let terminal = self.terminal().read(cx); - let title = terminal.title(false); - let pid = terminal.pid_getter()?.fallback_pid(); - - Some(TabTooltipContent::Custom(Box::new(move |_window, cx| { - cx.new(|_| TerminalTooltip::new(title.clone(), pid.as_u32())) - .into() - }))) + Some(TabTooltipContent::Custom(Box::new(Tooltip::element({ + let terminal = self.terminal().read(cx); + let title = terminal.title(false); + let pid = terminal.pid_getter()?.fallback_pid(); + + move |_, _| { + v_flex() + .gap_1() + .child(Label::new(title.clone())) + .child(h_flex().flex_grow().child(Divider::horizontal())) + .child( + Label::new(format!("Process ID (PID): {}", pid)) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element() + } + })))) } fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement { @@ -1264,7 +1297,11 @@ impl Item for TerminalView { false } - fn as_searchable(&self, handle: &Entity) -> Option> { + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { Some(Box::new(handle.clone())) } @@ -1425,6 +1462,7 @@ impl SearchableItem for TerminalView { fn update_matches( &mut self, matches: &[Self::Match], + _active_match_index: Option, _window: &mut Window, cx: &mut Context, ) { 
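For reference, the cursor-visibility rules described by the comments in the `should_show_cursor` hunk above reduce to a small decision function. This is a hedged sketch: the enum is a local re-declaration of the three `TerminalBlink` variants, `terminal_requested_blinking` and `blink_visible` stand in for `blinking_terminal_enabled` and `BlinkManager::visible`, and the focus / terminal-mode short-circuits are condensed into a single `focused` flag; none of these are the crate's actual signatures.

```rust
// Local stand-in for settings::TerminalBlink, only for this sketch.
#[derive(Clone, Copy)]
enum TerminalBlink {
    Off,
    On,
    TerminalControlled,
}

fn should_show_cursor(
    focused: bool,
    setting: TerminalBlink,
    terminal_requested_blinking: bool,
    blink_visible: bool,
) -> bool {
    if !focused {
        // An unfocused terminal always renders its (hollow) cursor.
        return true;
    }
    match setting {
        // Blinking disabled: cursor stays solid.
        TerminalBlink::Off => true,
        // Always blink: follow the blink manager's current phase.
        TerminalBlink::On => blink_visible,
        // Terminal-controlled: only blink once the program has requested it.
        TerminalBlink::TerminalControlled => !terminal_requested_blinking || blink_visible,
    }
}

fn main() {
    assert!(should_show_cursor(true, TerminalBlink::Off, true, false));
    assert!(should_show_cursor(true, TerminalBlink::TerminalControlled, false, false));
    assert!(!should_show_cursor(true, TerminalBlink::On, false, false));
}
```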
@@ -1447,7 +1485,6 @@ impl SearchableItem for TerminalView { &mut self, index: usize, _: &[Self::Match], - _collapse: bool, _window: &mut Window, cx: &mut Context, ) { @@ -1552,7 +1589,8 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti .read(cx) .active_project_directory(cx) .as_deref() - .map(Path::to_path_buf), + .map(Path::to_path_buf) + .or_else(|| first_project_directory(workspace, cx)), WorkingDirectory::FirstProjectDirectory => first_project_directory(workspace, cx), WorkingDirectory::AlwaysHome => None, WorkingDirectory::Always { directory } => { @@ -1566,10 +1604,13 @@ pub(crate) fn default_working_directory(workspace: &Workspace, cx: &App) -> Opti ///Gets the first project's home directory, or the home directory fn first_project_directory(workspace: &Workspace, cx: &App) -> Option { let worktree = workspace.worktrees(cx).next()?.read(cx); - if !worktree.root_entry()?.is_dir() { - return None; + let worktree_path = worktree.abs_path(); + if worktree.root_entry()?.is_dir() { + Some(worktree_path.to_path_buf()) + } else { + // If worktree is a file, return its parent directory + worktree_path.parent().map(|p| p.to_path_buf()) } - Some(worktree.abs_path().to_path_buf()) } #[cfg(test)] @@ -1602,7 +1643,7 @@ mod tests { }); } - // No active entry, but a worktree, worktree is a file -> home_dir() + // No active entry, but a worktree, worktree is a file -> parent directory #[gpui::test] async fn no_active_entry_worktree_is_file(cx: &mut TestAppContext) { let (project, workspace) = init_test(cx).await; @@ -1617,9 +1658,9 @@ mod tests { assert!(workspace.worktrees(cx).next().is_some()); let res = default_working_directory(workspace, cx); - assert_eq!(res, None); + assert_eq!(res, Some(Path::new("/").to_path_buf())); let res = first_project_directory(workspace, cx); - assert_eq!(res, None); + assert_eq!(res, Some(Path::new("/").to_path_buf())); }); } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index cf2febdfc505b426fd8d224a2dc29f18d22cd1a8..bf660b1302466e2b244a86b3d1e58ea2b6991067 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -6,16 +6,38 @@ use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::{Bias, Dimensions}; /// A timestamped position in a buffer -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, Hash)] pub struct Anchor { + /// The timestamp of the operation that inserted the text + /// in which this anchor is located. pub timestamp: clock::Lamport, - /// The byte offset in the buffer + /// The byte offset into the text inserted in the operation + /// at `timestamp`. pub offset: usize, - /// Describes which character the anchor is biased towards + /// Whether this anchor stays attached to the character *before* or *after* + /// the offset. 
pub bias: Bias, pub buffer_id: Option, } +impl Debug for Anchor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.is_min() { + return write!(f, "Anchor::min({:?})", self.buffer_id); + } + if self.is_max() { + return write!(f, "Anchor::max({:?})", self.buffer_id); + } + + f.debug_struct("Anchor") + .field("timestamp", &self.timestamp) + .field("offset", &self.offset) + .field("bias", &self.bias) + .field("buffer_id", &self.buffer_id) + .finish() + } +} + impl Anchor { pub const MIN: Self = Self { timestamp: clock::Lamport::MIN, @@ -31,6 +53,36 @@ impl Anchor { buffer_id: None, }; + pub fn min_for_buffer(buffer_id: BufferId) -> Self { + Self { + timestamp: clock::Lamport::MIN, + offset: usize::MIN, + bias: Bias::Left, + buffer_id: Some(buffer_id), + } + } + + pub fn max_for_buffer(buffer_id: BufferId) -> Self { + Self { + timestamp: clock::Lamport::MAX, + offset: usize::MAX, + bias: Bias::Right, + buffer_id: Some(buffer_id), + } + } + + pub fn min_min_range_for_buffer(buffer_id: BufferId) -> std::ops::Range { + let min = Self::min_for_buffer(buffer_id); + min..min + } + pub fn max_max_range_for_buffer(buffer_id: BufferId) -> std::ops::Range { + let max = Self::max_for_buffer(buffer_id); + max..max + } + pub fn min_max_range_for_buffer(buffer_id: BufferId) -> std::ops::Range { + Self::min_for_buffer(buffer_id)..Self::max_for_buffer(buffer_id) + } + pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering { let fragment_id_comparison = if self.timestamp == other.timestamp { Ordering::Equal @@ -91,7 +143,7 @@ impl Anchor { /// Returns true when the [`Anchor`] is located inside a visible fragment. pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool { - if *self == Anchor::MIN || *self == Anchor::MAX { + if self.is_min() || self.is_max() { true } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) { false @@ -109,6 +161,18 @@ impl Anchor { item.is_some_and(|fragment| fragment.visible) } } + + pub fn is_min(&self) -> bool { + self.timestamp == clock::Lamport::MIN + && self.offset == usize::MIN + && self.bias == Bias::Left + } + + pub fn is_max(&self) -> bool { + self.timestamp == clock::Lamport::MAX + && self.offset == usize::MAX + && self.bias == Bias::Right + } } pub trait OffsetRangeExt { diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index e690792d0c903031f8fc7e8bf81215bf0db0e336..e355f70c492ff3cdf2632f4e6204723fb05c9235 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -2,11 +2,15 @@ use crate::{Anchor, BufferSnapshot, TextDimension}; use std::cmp::Ordering; use std::ops::Range; -#[derive(Copy, Clone, Debug, PartialEq)] +#[derive(Default, Copy, Clone, Debug, PartialEq)] pub enum SelectionGoal { + #[default] None, HorizontalPosition(f64), - HorizontalRange { start: f64, end: f64 }, + HorizontalRange { + start: f64, + end: f64, + }, WrappedHorizontalPosition((u32, f32)), } @@ -19,12 +23,6 @@ pub struct Selection { pub goal: SelectionGoal, } -impl Default for SelectionGoal { - fn default() -> Self { - Self::None - } -} - impl Selection { /// A place where the selection had stopped at. 
pub fn head(&self) -> T { @@ -132,9 +130,15 @@ impl Selection { } } -impl Selection { +impl Selection { + pub fn len(&self) -> ::Output { + self.end - self.start + } +} + +impl Selection { #[cfg(feature = "test-support")] - pub fn from_offset(offset: usize) -> Self { + pub fn from_offset(offset: T) -> Self { Selection { id: 0, start: offset, @@ -144,7 +148,7 @@ impl Selection { } } - pub fn equals(&self, offset_range: &Range) -> bool { + pub fn equals(&self, offset_range: &Range) -> bool { self.start == offset_range.start && self.end == offset_range.end } } diff --git a/crates/text/src/subscription.rs b/crates/text/src/subscription.rs index 878e8a2cfe0a82782300089881b8cf31b428d2c2..50857a2de4ca2f9a89514a482973a0d14cce2163 100644 --- a/crates/text/src/subscription.rs +++ b/crates/text/src/subscription.rs @@ -6,36 +6,55 @@ use std::{ }; #[derive(Default)] -pub struct Topic(Mutex>>>>); +pub struct Topic(Mutex>>>>); -pub struct Subscription(Arc>>); +pub struct Subscription(Arc>>); -impl Topic { - pub fn subscribe(&mut self) -> Subscription { +impl Topic +where + T: 'static + + Copy + + Ord + + std::ops::Sub + + std::ops::Add + + std::ops::AddAssign + + Default, + TDelta: Ord + Copy, +{ + pub fn subscribe(&mut self) -> Subscription { let subscription = Subscription(Default::default()); self.0.get_mut().push(Arc::downgrade(&subscription.0)); subscription } - pub fn publish(&self, edits: impl Clone + IntoIterator>) { + pub fn publish(&self, edits: impl Clone + IntoIterator>) { publish(&mut self.0.lock(), edits); } - pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator>) { + pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator>) { publish(self.0.get_mut(), edits); } } -impl Subscription { - pub fn consume(&self) -> Patch { +impl Subscription { + pub fn consume(&self) -> Patch { mem::take(&mut *self.0.lock()) } } -fn publish( - subscriptions: &mut Vec>>>, - edits: impl Clone + IntoIterator>, -) { +fn publish( + subscriptions: &mut Vec>>>, + edits: impl Clone + IntoIterator>, +) where + T: 'static + + Copy + + Ord + + std::ops::Sub + + std::ops::Add + + std::ops::AddAssign + + Default, + TDelta: Ord + Copy, +{ subscriptions.retain(|subscription| { if let Some(subscription) = subscription.upgrade() { let mut patch = subscription.lock(); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index d9a6b2bb26606c4192ef66351a5bd8f6bca667e7..866552e4e5d9039a9517a556323a4ba7a89fcee1 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -39,6 +39,7 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{Dimensions, FilterCursor, SumTree, TreeMap, TreeSet}; use undo_map::UndoMap; +use util::debug_panic; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; @@ -54,7 +55,7 @@ pub struct Buffer { deferred_ops: OperationQueue, deferred_replicas: HashSet, pub lamport_clock: clock::Lamport, - subscriptions: Topic, + subscriptions: Topic, edit_id_resolvers: HashMap>>, wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>, } @@ -1619,7 +1620,7 @@ impl Buffer { self.edited_ranges_for_edit_ids(&transaction.edit_ids) } - pub fn subscribe(&mut self) -> Subscription { + pub fn subscribe(&mut self) -> Subscription { self.subscriptions.subscribe() } @@ -1652,10 +1653,7 @@ impl Buffer { ) -> impl 'static + Future> + use { let mut futures = Vec::new(); for anchor in anchors { - if !self.version.observed(anchor.timestamp) - && anchor != Anchor::MAX - && anchor != Anchor::MIN - { + if !self.version.observed(anchor.timestamp) && !anchor.is_max() 
&& !anchor.is_min() { let (tx, rx) = oneshot::channel(); self.edit_id_resolvers .entry(anchor.timestamp) @@ -2258,9 +2256,9 @@ impl BufferSnapshot { let mut position = D::zero(()); anchors.map(move |(anchor, payload)| { - if *anchor == Anchor::MIN { + if anchor.is_min() { return (D::zero(()), payload); - } else if *anchor == Anchor::MAX { + } else if anchor.is_max() { return (D::from_text_summary(&self.visible_text.summary()), payload); } @@ -2318,13 +2316,18 @@ impl BufferSnapshot { } pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize { - if *anchor == Anchor::MIN { + if anchor.is_min() { 0 - } else if *anchor == Anchor::MAX { + } else if anchor.is_max() { self.visible_text.len() } else { - debug_assert!(anchor.buffer_id == Some(self.remote_id)); - debug_assert!(self.version.observed(anchor.timestamp)); + debug_assert_eq!(anchor.buffer_id, Some(self.remote_id)); + debug_assert!( + self.version.observed(anchor.timestamp), + "Anchor timestamp {:?} not observed by buffer {:?}", + anchor.timestamp, + self.version + ); let anchor_key = InsertionFragmentKey { timestamp: anchor.timestamp, split_offset: anchor.offset, @@ -2393,9 +2396,9 @@ impl BufferSnapshot { } fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> { - if *anchor == Anchor::MIN { + if anchor.is_min() { Some(Locator::min_ref()) - } else if *anchor == Anchor::MAX { + } else if anchor.is_max() { Some(Locator::max_ref()) } else { let anchor_key = InsertionFragmentKey { @@ -2438,18 +2441,33 @@ impl BufferSnapshot { self.anchor_at_offset(position.to_offset(self), bias) } - fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor { + fn anchor_at_offset(&self, mut offset: usize, bias: Bias) -> Anchor { if bias == Bias::Left && offset == 0 { - Anchor::MIN - } else if bias == Bias::Right && offset == self.len() { - Anchor::MAX + Anchor::min_for_buffer(self.remote_id) + } else if bias == Bias::Right + && ((!cfg!(debug_assertions) && offset >= self.len()) || offset == self.len()) + { + Anchor::max_for_buffer(self.remote_id) } else { - if offset > self.visible_text.len() { - panic!("offset {} is out of bounds", offset) + if self + .visible_text + .assert_char_boundary::<{ cfg!(debug_assertions) }>(offset) + { + offset = match bias { + Bias::Left => self.visible_text.floor_char_boundary(offset), + Bias::Right => self.visible_text.ceil_char_boundary(offset), + }; } - self.visible_text.assert_char_boundary(offset); let (start, _, item) = self.fragments.find::(&None, &offset, bias); - let fragment = item.unwrap(); + let Some(fragment) = item else { + // We got a bad offset, likely out of bounds + debug_panic!( + "Failed to find fragment at offset {} (len: {})", + offset, + self.len() + ); + return Anchor::max_for_buffer(self.remote_id); + }; let overshoot = offset - start; Anchor { timestamp: fragment.timestamp, @@ -2461,8 +2479,8 @@ impl BufferSnapshot { } pub fn can_resolve(&self, anchor: &Anchor) -> bool { - *anchor == Anchor::MIN - || *anchor == Anchor::MAX + anchor.is_min() + || anchor.is_max() || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp)) } @@ -3130,43 +3148,48 @@ pub trait ToOffset { } impl ToOffset for Point { + #[inline] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot.point_to_offset(*self) } } impl ToOffset for usize { - #[track_caller] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { - assert!( - *self <= snapshot.len(), - "offset {} is out of range, snapshot length is {}", - self, - snapshot.len() - ); - *self + if snapshot + 
.as_rope() + .assert_char_boundary::<{ cfg!(debug_assertions) }>(*self) + { + snapshot.as_rope().floor_char_boundary(*self) + } else { + *self + } } } impl ToOffset for Anchor { + #[inline] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot.summary_for_anchor(self) } } impl ToOffset for &T { + #[inline] fn to_offset(&self, content: &BufferSnapshot) -> usize { (*self).to_offset(content) } } impl ToOffset for PointUtf16 { + #[inline] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot.point_utf16_to_offset(*self) } } impl ToOffset for Unclipped { + #[inline] fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot.unclipped_point_utf16_to_offset(*self) } @@ -3177,24 +3200,28 @@ pub trait ToPoint { } impl ToPoint for Anchor { + #[inline] fn to_point(&self, snapshot: &BufferSnapshot) -> Point { snapshot.summary_for_anchor(self) } } impl ToPoint for usize { + #[inline] fn to_point(&self, snapshot: &BufferSnapshot) -> Point { snapshot.offset_to_point(*self) } } impl ToPoint for Point { + #[inline] fn to_point(&self, _: &BufferSnapshot) -> Point { *self } } impl ToPoint for Unclipped { + #[inline] fn to_point(&self, snapshot: &BufferSnapshot) -> Point { snapshot.unclipped_point_utf16_to_point(*self) } @@ -3205,24 +3232,28 @@ pub trait ToPointUtf16 { } impl ToPointUtf16 for Anchor { + #[inline] fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 { snapshot.summary_for_anchor(self) } } impl ToPointUtf16 for usize { + #[inline] fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 { snapshot.offset_to_point_utf16(*self) } } impl ToPointUtf16 for PointUtf16 { + #[inline] fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 { *self } } impl ToPointUtf16 for Point { + #[inline] fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 { snapshot.point_to_point_utf16(*self) } @@ -3233,18 +3264,21 @@ pub trait ToOffsetUtf16 { } impl ToOffsetUtf16 for Anchor { + #[inline] fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 { snapshot.summary_for_anchor(self) } } impl ToOffsetUtf16 for usize { + #[inline] fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 { snapshot.offset_to_offset_utf16(*self) } } impl ToOffsetUtf16 for OffsetUtf16 { + #[inline] fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 { *self } @@ -3255,24 +3289,28 @@ pub trait FromAnchor { } impl FromAnchor for Anchor { + #[inline] fn from_anchor(anchor: &Anchor, _snapshot: &BufferSnapshot) -> Self { *anchor } } impl FromAnchor for Point { + #[inline] fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { snapshot.summary_for_anchor(anchor) } } impl FromAnchor for PointUtf16 { + #[inline] fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { snapshot.summary_for_anchor(anchor) } } impl FromAnchor for usize { + #[inline] fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { snapshot.summary_for_anchor(anchor) } @@ -3349,6 +3387,25 @@ impl LineEnding { } } +pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator { + rope.chunks().flat_map(move |chunk| { + let mut newline = false; + let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str()); + chunk + .lines() + .flat_map(move |line| { + let ending = if newline { + Some(line_ending.as_str()) + } else { + None + }; + newline = true; + ending.into_iter().chain([line]) + }) + .chain(end_with_newline) + }) +} + #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git 
a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index a9cd163b8c634f6c3fd8061164b72f8b54127c81..82be2896c67f155ac61de1ca6afb058adbf5ea9c 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -9,11 +9,17 @@ pub(crate) fn neutral() -> ColorScaleSet { } const ADDED_COLOR: Hsla = Hsla { - h: 142. / 360., - s: 0.68, - l: 0.45, + h: 134. / 360., + s: 0.55, + l: 0.40, a: 1.0, }; +const WORD_ADDED_COLOR: Hsla = Hsla { + h: 134. / 360., + s: 0.55, + l: 0.40, + a: 0.35, +}; const MODIFIED_COLOR: Hsla = Hsla { h: 48. / 360., s: 0.76, @@ -21,11 +27,17 @@ const MODIFIED_COLOR: Hsla = Hsla { a: 1.0, }; const REMOVED_COLOR: Hsla = Hsla { - h: 355. / 360., - s: 0.65, - l: 0.65, + h: 350. / 360., + s: 0.88, + l: 0.25, a: 1.0, }; +const WORD_DELETED_COLOR: Hsla = Hsla { + h: 350. / 360., + s: 0.88, + l: 0.25, + a: 0.80, +}; /// The default colors for the theme. /// @@ -79,6 +91,7 @@ impl ThemeColors { tab_inactive_background: neutral().light().step_2(), tab_active_background: neutral().light().step_1(), search_match_background: neutral().light().step_5(), + search_active_match_background: neutral().light().step_7(), panel_background: neutral().light().step_2(), panel_focused_border: blue().light().step_10(), panel_indent_guide: neutral().light_alpha().step_5(), @@ -152,6 +165,8 @@ impl ThemeColors { version_control_renamed: MODIFIED_COLOR, version_control_conflict: orange().light().step_12(), version_control_ignored: gray().light().step_12(), + version_control_word_added: WORD_ADDED_COLOR, + version_control_word_deleted: WORD_DELETED_COLOR, version_control_conflict_marker_ours: green().light().step_10().alpha(0.5), version_control_conflict_marker_theirs: blue().light().step_10().alpha(0.5), vim_normal_background: system.transparent, @@ -214,6 +229,7 @@ impl ThemeColors { tab_inactive_background: neutral().dark().step_2(), tab_active_background: neutral().dark().step_1(), search_match_background: neutral().dark().step_5(), + search_active_match_background: neutral().dark().step_3(), panel_background: neutral().dark().step_2(), panel_focused_border: blue().dark().step_8(), panel_indent_guide: neutral().dark_alpha().step_4(), @@ -287,6 +303,8 @@ impl ThemeColors { version_control_renamed: MODIFIED_COLOR, version_control_conflict: orange().dark().step_12(), version_control_ignored: gray().dark().step_12(), + version_control_word_added: WORD_ADDED_COLOR, + version_control_word_deleted: WORD_DELETED_COLOR, version_control_conflict_marker_ours: green().dark().step_10().alpha(0.5), version_control_conflict_marker_theirs: blue().dark().step_10().alpha(0.5), vim_normal_background: system.transparent, diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index ae120165f23095266cf92fd33a1cd1ccb88fe309..6bfcb1c86811136388eb5a557458f88c65d0ac09 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -71,11 +71,17 @@ pub(crate) fn zed_default_dark() -> Theme { let yellow = hsla(39. / 360., 67. / 100., 69. / 100., 1.0); const ADDED_COLOR: Hsla = Hsla { - h: 142. / 360., - s: 0.68, - l: 0.45, + h: 134. / 360., + s: 0.55, + l: 0.40, a: 1.0, }; + const WORD_ADDED_COLOR: Hsla = Hsla { + h: 134. / 360., + s: 0.55, + l: 0.40, + a: 0.35, + }; const MODIFIED_COLOR: Hsla = Hsla { h: 48. / 360., s: 0.76, @@ -83,11 +89,17 @@ pub(crate) fn zed_default_dark() -> Theme { a: 1.0, }; const REMOVED_COLOR: Hsla = Hsla { - h: 355. / 360., - s: 0.65, - l: 0.65, + h: 350. 
/ 360., + s: 0.88, + l: 0.25, a: 1.0, }; + const WORD_DELETED_COLOR: Hsla = Hsla { + h: 350. / 360., + s: 0.88, + l: 0.25, + a: 0.80, + }; let player = PlayerColors::dark(); Theme { @@ -140,6 +152,7 @@ pub(crate) fn zed_default_dark() -> Theme { tab_inactive_background: bg, tab_active_background: editor, search_match_background: bg, + search_active_match_background: bg, editor_background: editor, editor_gutter_background: editor, @@ -231,6 +244,8 @@ pub(crate) fn zed_default_dark() -> Theme { version_control_renamed: MODIFIED_COLOR, version_control_conflict: crate::orange().light().step_12(), version_control_ignored: crate::gray().light().step_12(), + version_control_word_added: WORD_ADDED_COLOR, + version_control_word_deleted: WORD_DELETED_COLOR, version_control_conflict_marker_ours: crate::green().light().step_12().alpha(0.5), version_control_conflict_marker_theirs: crate::blue().light().step_12().alpha(0.5), diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index c3e7f3cfbc25cc04f05cd939f74154a732f16f58..818bf1b2f1b093729ed79fc0ed132cc3b3f74e5f 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -88,7 +88,9 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("coffeescript", &["coffee"]), ( "cpp", - &["c++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx"], + &[ + "c++", "h++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx", + ], ), ("crystal", &["cr", "ecr"]), ("csharp", &["cs"]), @@ -165,6 +167,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("nim", &["nim"]), ("nix", &["nix"]), ("ocaml", &["ml", "mli"]), + ("odin", &["odin"]), ("php", &["php"]), ( "prettier", @@ -330,6 +333,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("nim", "icons/file_icons/nim.svg"), ("nix", "icons/file_icons/nix.svg"), ("ocaml", "icons/file_icons/ocaml.svg"), + ("odin", "icons/file_icons/odin.svg"), ("phoenix", "icons/file_icons/phoenix.svg"), ("php", "icons/file_icons/php.svg"), ("prettier", "icons/file_icons/prettier.svg"), diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index c4ed624bf642e0820fd9187224f96e2acfa92018..f52b2cf0e50bc5d8b26de9457432aba9218a17b9 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -287,6 +287,15 @@ pub fn theme_colors_refinement( .panel_background .as_ref() .and_then(|color| try_parse_color(color).ok()); + let search_match_background = this + .search_match_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); + let search_active_match_background = this + .search_active_match_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + .or(search_match_background); ThemeColorsRefinement { border, border_variant: this @@ -442,10 +451,8 @@ pub fn theme_colors_refinement( .tab_active_background .as_ref() .and_then(|color| try_parse_color(color).ok()), - search_match_background: this - .search_match_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), + search_match_background: search_match_background, + search_active_match_background: search_active_match_background, panel_background, panel_focused_border: this .panel_focused_border @@ -744,6 +751,14 @@ pub fn theme_colors_refinement( .and_then(|color| try_parse_color(color).ok()) // Fall back to `conflict`, for backwards compatibility. 
.or(status_colors.ignored), + version_control_word_added: this + .version_control_word_added + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + version_control_word_deleted: this + .version_control_word_deleted + .as_ref() + .and_then(|color| try_parse_color(color).ok()), #[allow(deprecated)] version_control_conflict_marker_ours: this .version_control_conflict_marker_ours diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index a753859d52677974902a36a5d67ea86611e47006..d60d4882a68412eeb10a95ba5d8540f5cbc87421 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -304,31 +304,50 @@ impl IconThemeSelection { } } -// impl ThemeSettingsContent { /// Sets the theme for the given appearance to the theme with the specified name. +/// +/// The caller should make sure that the [`Appearance`] matches the theme associated with the name. +/// +/// If the current [`ThemeAppearanceMode`] is set to [`System`] and the user's system [`Appearance`] +/// is different than the new theme's [`Appearance`], this function will update the +/// [`ThemeAppearanceMode`] to the new theme's appearance in order to display the new theme. +/// +/// [`System`]: ThemeAppearanceMode::System pub fn set_theme( current: &mut SettingsContent, theme_name: impl Into>, - appearance: Appearance, + theme_appearance: Appearance, + system_appearance: Appearance, ) { - if let Some(selection) = current.theme.theme.as_mut() { - let theme_to_update = match selection { - settings::ThemeSelection::Static(theme) => theme, - settings::ThemeSelection::Dynamic { mode, light, dark } => match mode { - ThemeAppearanceMode::Light => light, - ThemeAppearanceMode::Dark => dark, - ThemeAppearanceMode::System => match appearance { - Appearance::Light => light, - Appearance::Dark => dark, - }, - }, - }; + let theme_name = ThemeName(theme_name.into()); - *theme_to_update = ThemeName(theme_name.into()); - } else { - current.theme.theme = Some(settings::ThemeSelection::Static(ThemeName( - theme_name.into(), - ))); + let Some(selection) = current.theme.theme.as_mut() else { + current.theme.theme = Some(settings::ThemeSelection::Static(theme_name)); + return; + }; + + match selection { + settings::ThemeSelection::Static(theme) => { + *theme = theme_name; + } + settings::ThemeSelection::Dynamic { mode, light, dark } => { + // Update the appropriate theme slot based on appearance. + match theme_appearance { + Appearance::Light => *light = theme_name, + Appearance::Dark => *dark = theme_name, + } + + // Don't update the theme mode if it is set to system and the new theme has the same + // appearance. + let should_update_mode = + !(mode == &ThemeAppearanceMode::System && theme_appearance == system_appearance); + + if should_update_mode { + // Update the mode to the specified appearance (otherwise we might set the theme and + // nothing gets updated because the system specified the other mode appearance). 
+ *mode = ThemeAppearanceMode::from(theme_appearance); + } + } } } diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 179d02b91684410bb641893e87759bd30cc73b36..905f2245e03ad7a8ce7a4eb8be6799e5ded379c4 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -128,6 +128,7 @@ pub struct ThemeColors { pub tab_inactive_background: Hsla, pub tab_active_background: Hsla, pub search_match_background: Hsla, + pub search_active_match_background: Hsla, pub panel_background: Hsla, pub panel_focused_border: Hsla, pub panel_indent_guide: Hsla, @@ -300,7 +301,10 @@ pub struct ThemeColors { pub version_control_conflict: Hsla, /// Represents an ignored entry in version control systems. pub version_control_ignored: Hsla, - + /// Represents an added word in a word diff. + pub version_control_word_added: Hsla, + /// Represents a deleted word in a word diff. + pub version_control_word_deleted: Hsla, /// Represents the "ours" region of a merge conflict. pub version_control_conflict_marker_ours: Hsla, /// Represents the "theirs" region of a merge conflict. @@ -349,6 +353,7 @@ pub enum ThemeColorField { TabInactiveBackground, TabActiveBackground, SearchMatchBackground, + SearchActiveMatchBackground, PanelBackground, PanelFocusedBorder, PanelIndentGuide, @@ -464,6 +469,7 @@ impl ThemeColors { ThemeColorField::TabInactiveBackground => self.tab_inactive_background, ThemeColorField::TabActiveBackground => self.tab_active_background, ThemeColorField::SearchMatchBackground => self.search_match_background, + ThemeColorField::SearchActiveMatchBackground => self.search_active_match_background, ThemeColorField::PanelBackground => self.panel_background, ThemeColorField::PanelFocusedBorder => self.panel_focused_border, ThemeColorField::PanelIndentGuide => self.panel_indent_guide, diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index aef975416f7698bfad0ba50de08269c9000a1dec..c94e0d60bf3fa561c8d49dbaf544f47fe60e7ea9 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -84,6 +84,15 @@ impl From for Appearance { } } +impl From for ThemeAppearanceMode { + fn from(value: Appearance) -> Self { + match value { + Appearance::Light => Self::Light, + Appearance::Dark => Self::Dark, + } + } +} + /// Which themes should be loaded. This is used primarily for testing. pub enum LoadThemes { /// Only load the base theme. diff --git a/crates/theme_selector/src/theme_selector.rs b/crates/theme_selector/src/theme_selector.rs index 38e7fc33f7b14f198679d0dd541c39cd444a71a3..74b242dd0b7c3a3ddbe6ca76d34a59f03560f14a 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -9,7 +9,10 @@ use gpui::{ use picker::{Picker, PickerDelegate}; use settings::{Settings, SettingsStore, update_settings_file}; use std::sync::Arc; -use theme::{Appearance, Theme, ThemeMeta, ThemeRegistry, ThemeSettings}; +use theme::{ + Appearance, SystemAppearance, Theme, ThemeAppearanceMode, ThemeMeta, ThemeName, ThemeRegistry, + ThemeSelection, ThemeSettings, +}; use ui::{ListItem, ListItemSpacing, prelude::*, v_flex}; use util::ResultExt; use workspace::{ModalView, Workspace, ui::HighlightedLabel, with_active_or_new_workspace}; @@ -114,7 +117,14 @@ struct ThemeSelectorDelegate { fs: Arc, themes: Vec, matches: Vec, - original_theme: Arc, + /// The theme that was selected before the `ThemeSelector` menu was opened. + /// + /// We use this to return back to theme that was set if the user dismisses the menu. 
+ original_theme_settings: ThemeSettings, + /// The current system appearance. + original_system_appearance: Appearance, + /// The currently selected new theme. + new_theme: Arc, selection_completed: bool, selected_theme: Option>, selected_index: usize, @@ -129,6 +139,8 @@ impl ThemeSelectorDelegate { cx: &mut Context, ) -> Self { let original_theme = cx.theme().clone(); + let original_theme_settings = ThemeSettings::get_global(cx).clone(); + let original_system_appearance = SystemAppearance::global(cx).0; let registry = ThemeRegistry::global(cx); let mut themes = registry @@ -143,13 +155,15 @@ impl ThemeSelectorDelegate { }) .collect::>(); + // Sort by dark vs light, then by name. themes.sort_unstable_by(|a, b| { a.appearance .is_light() .cmp(&b.appearance.is_light()) .then(a.name.cmp(&b.name)) }); - let matches = themes + + let matches: Vec = themes .iter() .map(|meta| StringMatch { candidate_id: 0, @@ -158,19 +172,25 @@ impl ThemeSelectorDelegate { string: meta.name.to_string(), }) .collect(); - let mut this = Self { + + // The current theme is likely in this list, so default to first showing that. + let selected_index = matches + .iter() + .position(|mat| mat.string == original_theme.name) + .unwrap_or(0); + + Self { fs, themes, matches, - original_theme: original_theme.clone(), - selected_index: 0, + original_theme_settings, + original_system_appearance, + new_theme: original_theme, // Start with the original theme. + selected_index, selection_completed: false, selected_theme: None, selector, - }; - - this.select_if_matching(&original_theme.name); - this + } } fn show_selected_theme( @@ -179,9 +199,10 @@ impl ThemeSelectorDelegate { ) -> Option> { if let Some(mat) = self.matches.get(self.selected_index) { let registry = ThemeRegistry::global(cx); + match registry.get(&mat.string) { Ok(theme) => { - Self::set_theme(theme.clone(), cx); + self.set_theme(theme.clone(), cx); Some(theme) } Err(error) => { @@ -194,21 +215,122 @@ impl ThemeSelectorDelegate { } } - fn select_if_matching(&mut self, theme_name: &str) { - self.selected_index = self - .matches - .iter() - .position(|mat| mat.string == theme_name) - .unwrap_or(self.selected_index); - } - - fn set_theme(theme: Arc, cx: &mut App) { + fn set_theme(&mut self, new_theme: Arc, cx: &mut App) { + // Update the global (in-memory) theme settings. SettingsStore::update_global(cx, |store, _| { - let mut theme_settings = store.get::(None).clone(); - let name = theme.as_ref().name.clone().into(); - theme_settings.theme = theme::ThemeSelection::Static(theme::ThemeName(name)); - store.override_global(theme_settings); + override_global_theme( + store, + &new_theme, + &self.original_theme_settings.theme, + self.original_system_appearance, + ) }); + + self.new_theme = new_theme; + } +} + +/// Overrides the global (in-memory) theme settings. +/// +/// Note that this does **not** update the user's `settings.json` file (see the +/// [`ThemeSelectorDelegate::confirm`] method and [`theme::set_theme`] function). +fn override_global_theme( + store: &mut SettingsStore, + new_theme: &Theme, + original_theme: &ThemeSelection, + system_appearance: Appearance, +) { + let theme_name = ThemeName(new_theme.name.clone().into()); + let new_appearance = new_theme.appearance(); + let new_theme_is_light = new_appearance.is_light(); + + let mut curr_theme_settings = store.get::(None).clone(); + + match (original_theme, &curr_theme_settings.theme) { + // Override the currently selected static theme. 
+ (ThemeSelection::Static(_), ThemeSelection::Static(_)) => { + curr_theme_settings.theme = ThemeSelection::Static(theme_name); + } + + // If the current theme selection is dynamic, then only override the global setting for the + // specific mode (light or dark). + ( + ThemeSelection::Dynamic { + mode: original_mode, + light: original_light, + dark: original_dark, + }, + ThemeSelection::Dynamic { .. }, + ) => { + let new_mode = update_mode_if_new_appearance_is_different_from_system( + original_mode, + system_appearance, + new_appearance, + ); + + let updated_theme = retain_original_opposing_theme( + new_theme_is_light, + new_mode, + theme_name, + original_light, + original_dark, + ); + + curr_theme_settings.theme = updated_theme; + } + + // The theme selection mode changed while selecting new themes (someone edited the settings + // file on disk while we had the dialogue open), so don't do anything. + _ => return, + }; + + store.override_global(curr_theme_settings); +} + +/// Helper function for determining the new [`ThemeAppearanceMode`] for the new theme. +/// +/// If the original theme mode was [`System`] and the new theme's appearance matches the system +/// appearance, we don't need to change the mode setting. +/// +/// Otherwise, we need to change the mode in order to see the new theme. +/// +/// [`System`]: ThemeAppearanceMode::System +fn update_mode_if_new_appearance_is_different_from_system( + original_mode: &ThemeAppearanceMode, + system_appearance: Appearance, + new_appearance: Appearance, +) -> ThemeAppearanceMode { + if original_mode == &ThemeAppearanceMode::System && system_appearance == new_appearance { + ThemeAppearanceMode::System + } else { + ThemeAppearanceMode::from(new_appearance) + } +} + +/// Helper function for updating / displaying the [`ThemeSelection`] while using the theme selector. +/// +/// We want to retain the alternate theme selection of the original settings (before the menu was +/// opened), not the currently selected theme (which likely has changed multiple times while the +/// menu has been open).
+fn retain_original_opposing_theme( + new_theme_is_light: bool, + new_mode: ThemeAppearanceMode, + theme_name: ThemeName, + original_light: &ThemeName, + original_dark: &ThemeName, +) -> ThemeSelection { + if new_theme_is_light { + ThemeSelection::Dynamic { + mode: new_mode, + light: theme_name, + dark: original_dark.clone(), + } + } else { + ThemeSelection::Dynamic { + mode: new_mode, + light: original_light.clone(), + dark: theme_name, + } } } @@ -225,19 +347,20 @@ impl PickerDelegate for ThemeSelectorDelegate { fn confirm( &mut self, - _: bool, - window: &mut Window, + _secondary: bool, + _window: &mut Window, cx: &mut Context>, ) { self.selection_completed = true; - let appearance = Appearance::from(window.appearance()); - let theme_name = ThemeSettings::get_global(cx).theme.name(appearance).0; + let theme_name: Arc = self.new_theme.name.as_str().into(); + let theme_appearance = self.new_theme.appearance; + let system_appearance = SystemAppearance::global(cx).0; telemetry::event!("Settings Changed", setting = "theme", value = theme_name); update_settings_file(self.fs.clone(), cx, move |settings, _| { - theme::set_theme(settings, theme_name.to_string(), appearance); + theme::set_theme(settings, theme_name, theme_appearance, system_appearance); }); self.selector @@ -249,7 +372,9 @@ impl PickerDelegate for ThemeSelectorDelegate { fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { if !self.selection_completed { - Self::set_theme(self.original_theme.clone(), cx); + SettingsStore::update_global(cx, |store, _| { + store.override_global(self.original_theme_settings.clone()); + }); self.selection_completed = true; } diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 01a12260ad03284d77dfda19fdf2286cf6196ca8..817b73c45ecd2df4a76e9a67f425b2b459c0c026 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -151,10 +151,10 @@ impl ApplicationMenu { // Application menu must have same ids as first menu item in standard menu div() - .id(SharedString::from(format!("{}-menu-item", menu_name))) + .id(format!("{}-menu-item", menu_name)) .occlude() .child( - PopoverMenu::new(SharedString::from(format!("{}-menu-popover", menu_name))) + PopoverMenu::new(format!("{}-menu-popover", menu_name)) .menu(move |window, cx| { Self::build_menu_from_items(entry.clone(), window, cx).into() }) @@ -184,10 +184,10 @@ impl ApplicationMenu { .collect(); div() - .id(SharedString::from(format!("{}-menu-item", menu_name))) + .id(format!("{}-menu-item", menu_name)) .occlude() .child( - PopoverMenu::new(SharedString::from(format!("{}-menu-popover", menu_name))) + PopoverMenu::new(format!("{}-menu-popover", menu_name)) .menu(move |window, cx| { Self::build_menu_from_items(entry.clone(), window, cx).into() }) diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 16a0389efa46429d91c79f4eb1e99f62d01753b5..8a2d23dd26f81da469fe229eeed586ea8fe49189 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -8,7 +8,7 @@ use gpui::{ AnyElement, Hsla, IntoElement, MouseButton, Path, ScreenCaptureSource, Styled, WeakEntity, canvas, point, }; -use gpui::{App, Task, Window, actions}; +use gpui::{App, Task, Window}; use project::WorktreeSettings; use rpc::proto::{self}; use settings::{Settings as _, SettingsLocation}; @@ -22,19 +22,7 @@ use workspace::notifications::DetachAndPromptErr; use crate::TitleBar; -actions!( - collab, - [ - /// Toggles screen sharing on or off. 
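The theme-selector changes above thread both the picked theme's appearance and the current system appearance into the new four-argument `theme::set_theme(settings, theme_name, theme_appearance, system_appearance)`, so a dynamic selection only rewrites the matching `light`/`dark` slot and only flips the mode when it has to. A minimal, self-contained sketch of that mode decision, mirroring `update_mode_if_new_appearance_is_different_from_system` from the diff; the local enums, the function name `mode_after_picking_theme`, and the example values in `main` are assumptions for illustration, not code from the PR:

```rust
// Stand-ins for the theme crate's `Appearance` and `ThemeAppearanceMode` types.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Appearance {
    Light,
    Dark,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum ThemeAppearanceMode {
    Light,
    Dark,
    System,
}

fn mode_after_picking_theme(
    original_mode: ThemeAppearanceMode,
    system_appearance: Appearance,
    new_theme_appearance: Appearance,
) -> ThemeAppearanceMode {
    // Keep `System` only when the picked theme already matches the OS appearance;
    // otherwise switch to an explicit mode so the newly picked theme is actually shown.
    if original_mode == ThemeAppearanceMode::System && system_appearance == new_theme_appearance {
        ThemeAppearanceMode::System
    } else {
        match new_theme_appearance {
            Appearance::Light => ThemeAppearanceMode::Light,
            Appearance::Dark => ThemeAppearanceMode::Dark,
        }
    }
}

fn main() {
    // System mode, light OS, user picks a dark theme: mode becomes `Dark`.
    assert_eq!(
        mode_after_picking_theme(ThemeAppearanceMode::System, Appearance::Light, Appearance::Dark),
        ThemeAppearanceMode::Dark
    );
    // System mode, dark OS, user picks another dark theme: `System` is kept.
    assert_eq!(
        mode_after_picking_theme(ThemeAppearanceMode::System, Appearance::Dark, Appearance::Dark),
        ThemeAppearanceMode::System
    );
}
```

The opposite slot of a dynamic selection is restored from the settings as they were before the picker opened (see `retain_original_opposing_theme` above), not from whatever theme was last previewed.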
- ToggleScreenSharing, - /// Toggles microphone mute. - ToggleMute, - /// Toggles deafen mode (mute both microphone and speakers). - ToggleDeafen - ] -); - -fn toggle_screen_sharing( +pub fn toggle_screen_sharing( screen: anyhow::Result>>, window: &mut Window, cx: &mut App, @@ -90,7 +78,7 @@ fn toggle_screen_sharing( toggle_screen_sharing.detach_and_prompt_err("Sharing Screen Failed", window, cx, |e, _, _| Some(format!("{:?}\n\nPlease check that you have given Zed permissions to record your screen in Settings.", e))); } -fn toggle_mute(_: &ToggleMute, cx: &mut App) { +pub fn toggle_mute(cx: &mut App) { let call = ActiveCall::global(cx).read(cx); if let Some(room) = call.room().cloned() { room.update(cx, |room, cx| { @@ -110,7 +98,7 @@ fn toggle_mute(_: &ToggleMute, cx: &mut App) { } } -fn toggle_deafen(_: &ToggleDeafen, cx: &mut App) { +pub fn toggle_deafen(cx: &mut App) { if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() { room.update(cx, |room, cx| room.toggle_deafen(cx)); } @@ -182,7 +170,9 @@ impl TitleBar { this.children(current_user_face_pile.map(|face_pile| { v_flex() - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) + .on_mouse_down(MouseButton::Left, |_, window, _| { + window.prevent_default() + }) .child(face_pile) .child(render_color_ribbon(player_colors.local().cursor)) })) @@ -217,6 +207,9 @@ impl TitleBar { .child(facepile) .child(render_color_ribbon(player_color.cursor)) .cursor_pointer() + .on_mouse_down(MouseButton::Left, |_, window, _| { + window.prevent_default() + }) .on_click({ let peer_id = collaborator.peer_id; cx.listener(move |this, _, window, cx| { @@ -453,9 +446,7 @@ impl TitleBar { .icon_size(IconSize::Small) .toggle_state(is_muted) .selected_style(ButtonStyle::Tinted(TintColor::Error)) - .on_click(move |_, _window, cx| { - toggle_mute(&Default::default(), cx); - }) + .on_click(move |_, _window, cx| toggle_mute(cx)) .into_any_element(), ); } @@ -492,7 +483,7 @@ impl TitleBar { } } }) - .on_click(move |_, _, cx| toggle_deafen(&Default::default(), cx)) + .on_click(move |_, _, cx| toggle_deafen(cx)) .into_any_element(), ); diff --git a/crates/title_bar/src/platform_title_bar.rs b/crates/title_bar/src/platform_title_bar.rs index fd03e764629454411c9726ef7dcf055d54582d7e..6ce7d089bb4641e2c1b7da710ebb0841fc51da4c 100644 --- a/crates/title_bar/src/platform_title_bar.rs +++ b/crates/title_bar/src/platform_title_bar.rs @@ -77,6 +77,47 @@ impl Render for PlatformTitleBar { .window_control_area(WindowControlArea::Drag) .w_full() .h(height) + .map(|this| { + this.on_mouse_down_out(cx.listener(move |this, _ev, _window, _cx| { + this.should_move = false; + })) + .on_mouse_up( + gpui::MouseButton::Left, + cx.listener(move |this, _ev, _window, _cx| { + this.should_move = false; + }), + ) + .on_mouse_down( + gpui::MouseButton::Left, + cx.listener(move |this, _ev, _window, _cx| { + this.should_move = true; + }), + ) + .on_mouse_move(cx.listener(move |this, _ev, window, _| { + if this.should_move { + this.should_move = false; + window.start_window_move(); + } + })) + }) + .map(|this| { + // Note: On Windows the title bar behavior is handled by the platform implementation. 
+ this.id(self.id.clone()) + .when(self.platform_style == PlatformStyle::Mac, |this| { + this.on_click(|event, window, _| { + if event.click_count() == 2 { + window.titlebar_double_click(); + } + }) + }) + .when(self.platform_style == PlatformStyle::Linux, |this| { + this.on_click(|event, window, _| { + if event.click_count() == 2 { + window.zoom_window(); + } + }) + }) + }) .map(|this| { if window.is_fullscreen() { this.pl_2() @@ -112,21 +153,6 @@ impl Render for PlatformTitleBar { .justify_between() .overflow_x_hidden() .w_full() - // Note: On Windows the title bar behavior is handled by the platform implementation. - .when(self.platform_style == PlatformStyle::Mac, |this| { - this.on_click(|event, window, _| { - if event.click_count() == 2 { - window.titlebar_double_click(); - } - }) - }) - .when(self.platform_style == PlatformStyle::Linux, |this| { - this.on_click(|event, window, _| { - if event.click_count() == 2 { - window.zoom_window(); - } - }) - }) .children(children), ) .when(!window.is_fullscreen(), |title_bar| { @@ -142,27 +168,6 @@ impl Render for PlatformTitleBar { window.show_window_menu(ev.position) }) }) - .on_mouse_move(cx.listener(move |this, _ev, window, _| { - if this.should_move { - this.should_move = false; - window.start_window_move(); - } - })) - .on_mouse_down_out(cx.listener(move |this, _ev, _window, _cx| { - this.should_move = false; - })) - .on_mouse_up( - MouseButton::Left, - cx.listener(move |this, _ev, _window, _cx| { - this.should_move = false; - }), - ) - .on_mouse_down( - MouseButton::Left, - cx.listener(move |this, _ev, _window, _cx| { - this.should_move = true; - }), - ) } else { title_bar } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 0652744a36b2a9e0b09347553a6d16f6c5344dbe..23572677919509d859a141cb09cce8f5822697ef 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -1,5 +1,5 @@ mod application_menu; -mod collab; +pub mod collab; mod onboarding_banner; pub mod platform_title_bar; mod platforms; @@ -30,18 +30,20 @@ use gpui::{ Subscription, WeakEntity, Window, actions, div, }; use onboarding_banner::OnboardingBanner; -use project::{Project, WorktreeSettings, git_store::GitStoreEvent}; +use project::{ + Project, WorktreeSettings, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees, +}; use remote::RemoteConnectionOptions; use settings::{Settings, SettingsLocation}; use std::sync::Arc; use theme::ActiveTheme; use title_bar_settings::TitleBarSettings; use ui::{ - Avatar, Button, ButtonLike, ButtonStyle, Chip, ContextMenu, Icon, IconName, IconSize, - IconWithIndicator, Indicator, PopoverMenu, PopoverMenuHandle, Tooltip, h_flex, prelude::*, + Avatar, ButtonLike, Chip, ContextMenu, IconWithIndicator, Indicator, PopoverMenu, + PopoverMenuHandle, TintColor, Tooltip, prelude::*, }; use util::{ResultExt, rel_path::RelPath}; -use workspace::{Workspace, notifications::NotifyResultExt}; +use workspace::{ToggleWorktreeSecurity, Workspace, notifications::NotifyResultExt}; use zed_actions::{OpenRecent, OpenRemote}; pub use onboarding_banner::restore_banner; @@ -163,11 +165,12 @@ impl Render for TitleBar { title_bar .when(title_bar_settings.show_project_items, |title_bar| { title_bar + .children(self.render_restricted_mode(cx)) .children(self.render_project_host(cx)) .child(self.render_project_name(cx)) }) .when(title_bar_settings.show_branch_name, |title_bar| { - title_bar.children(self.render_project_branch(cx)) + title_bar.children(self.render_project_repo(cx)) }) }) }) @@ 
-202,9 +205,11 @@ impl Render for TitleBar { .children(self.render_connection_status(status, cx)) .when( user.is_none() && TitleBarSettings::get_global(cx).show_sign_in, - |el| el.child(self.render_sign_in_button(cx)), + |this| this.child(self.render_sign_in_button(cx)), ) - .child(self.render_app_menu_button(cx)) + .when(TitleBarSettings::get_global(cx).show_user_menu, |this| { + this.child(self.render_user_menu_button(cx)) + }) .into_any_element(), ); @@ -289,7 +294,12 @@ impl TitleBar { _ => {} }), ); - subscriptions.push(cx.observe(&user_store, |_, _, cx| cx.notify())); + subscriptions.push(cx.observe(&user_store, |_a, _, cx| cx.notify())); + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + subscriptions.push(cx.subscribe(&trusted_worktrees, |_, _, _, cx| { + cx.notify(); + })); + } let banner = cx.new(|cx| { OnboardingBanner::new( @@ -315,20 +325,47 @@ impl TitleBar { client, _subscriptions: subscriptions, banner, - screen_share_popover_handle: Default::default(), + screen_share_popover_handle: PopoverMenuHandle::default(), } } + fn project_name(&self, cx: &Context) -> Option { + self.project + .read(cx) + .visible_worktrees(cx) + .map(|worktree| { + let worktree = worktree.read(cx); + let settings_location = SettingsLocation { + worktree_id: worktree.id(), + path: RelPath::empty(), + }; + + let settings = WorktreeSettings::get(Some(settings_location), cx); + let name = match &settings.project_name { + Some(name) => name.as_str(), + None => worktree.root_name_str(), + }; + SharedString::new(name) + }) + .next() + } + fn render_remote_project_connection(&self, cx: &mut Context) -> Option { let options = self.project.read(cx).remote_connection_options(cx)?; let host: SharedString = options.display_name().into(); - let (nickname, icon) = match options { - RemoteConnectionOptions::Ssh(options) => { - (options.nickname.map(|nick| nick.into()), IconName::Server) + let (nickname, tooltip_title, icon) = match options { + RemoteConnectionOptions::Ssh(options) => ( + options.nickname.map(|nick| nick.into()), + "Remote Project", + IconName::Server, + ), + RemoteConnectionOptions::Wsl(_) => (None, "Remote Project", IconName::Linux), + RemoteConnectionOptions::Docker(_dev_container_connection) => { + (None, "Dev Container", IconName::Box) } - RemoteConnectionOptions::Wsl(_) => (None, IconName::Linux), }; + let nickname = nickname.unwrap_or_else(|| host.clone()); let (indicator_color, meta) = match self.project.read(cx).remote_connection_state(cx)? 
{ @@ -375,7 +412,7 @@ impl TitleBar { ) .tooltip(move |_window, cx| { Tooltip::with_meta( - "Remote Project", + tooltip_title, Some(&OpenRemote { from_existing_connection: false, create_new_window: false, @@ -398,6 +435,48 @@ impl TitleBar { ) } + pub fn render_restricted_mode(&self, cx: &mut Context) -> Option { + let has_restricted_worktrees = TrustedWorktrees::try_get_global(cx) + .map(|trusted_worktrees| { + trusted_worktrees + .read(cx) + .has_restricted_worktrees(&self.project.read(cx).worktree_store(), cx) + }) + .unwrap_or(false); + if !has_restricted_worktrees { + return None; + } + + Some( + Button::new("restricted_mode_trigger", "Restricted Mode") + .style(ButtonStyle::Tinted(TintColor::Warning)) + .label_size(LabelSize::Small) + .color(Color::Warning) + .icon(IconName::Warning) + .icon_color(Color::Warning) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .tooltip(|_, cx| { + Tooltip::with_meta( + "You're in Restricted Mode", + Some(&ToggleWorktreeSecurity), + "Mark this project as trusted and unlock all features", + cx, + ) + }) + .on_click({ + cx.listener(move |this, _, window, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.show_worktree_trust_security_modal(true, window, cx) + }) + .log_err(); + }) + }) + .into_any_element(), + ) + } + pub fn render_project_host(&self, cx: &mut Context) -> Option { if self.project.read(cx).is_via_remote_server() { return self.render_remote_project_connection(cx); @@ -445,29 +524,12 @@ impl TitleBar { } pub fn render_project_name(&self, cx: &mut Context) -> impl IntoElement { - let name = self - .project - .read(cx) - .visible_worktrees(cx) - .map(|worktree| { - let worktree = worktree.read(cx); - let settings_location = SettingsLocation { - worktree_id: worktree.id(), - path: RelPath::empty(), - }; - - let settings = WorktreeSettings::get(Some(settings_location), cx); - match &settings.project_name { - Some(name) => name.as_str(), - None => worktree.root_name_str(), - } - }) - .next(); + let name = self.project_name(cx); let is_project_selected = name.is_some(); let name = if let Some(name) = name { - util::truncate_and_trailoff(name, MAX_PROJECT_NAME_LENGTH) + util::truncate_and_trailoff(&name, MAX_PROJECT_NAME_LENGTH) } else { - "Open recent project".to_string() + "Open Recent Project".to_string() }; Button::new("project_name_trigger", name) @@ -494,9 +556,10 @@ impl TitleBar { })) } - pub fn render_project_branch(&self, cx: &mut Context) -> Option { + pub fn render_project_repo(&self, cx: &mut Context) -> Option { let settings = TitleBarSettings::get_global(cx); let repository = self.project.read(cx).active_repository(cx)?; + let repository_count = self.project.read(cx).repositories(cx).len(); let workspace = self.workspace.upgrade()?; let repo = repository.read(cx); let branch_name = repo @@ -513,6 +576,19 @@ impl TitleBar { .collect::() }) })?; + let project_name = self.project_name(cx); + let repo_name = repo + .work_directory_abs_path + .file_name() + .and_then(|name| name.to_str()) + .map(SharedString::new); + let show_repo_name = + repository_count > 1 && repo.branch.is_some() && repo_name != project_name; + let branch_name = if let Some(repo_name) = repo_name.filter(|_| show_repo_name) { + format!("{repo_name}/{branch_name}") + } else { + branch_name + }; Some( Button::new("project_branch_trigger", branch_name) @@ -529,7 +605,7 @@ impl TitleBar { }) .on_click(move |_, window, cx| { let _ = workspace.update(cx, |this, cx| { - window.focus(&this.active_pane().focus_handle(cx)); + 
window.focus(&this.active_pane().focus_handle(cx), cx); window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); }); }) @@ -661,7 +737,7 @@ impl TitleBar { }) } - pub fn render_app_menu_button(&mut self, cx: &mut Context) -> impl Element { + pub fn render_user_menu_button(&mut self, cx: &mut Context) -> impl Element { let user_store = self.user_store.read(cx); let user = user_store.current_user(); diff --git a/crates/title_bar/src/title_bar_settings.rs b/crates/title_bar/src/title_bar_settings.rs index 29fae4d31eb33ac70a22c21010f09350847439c2..155b7b7bc797567927a70b12c677372cb92c9453 100644 --- a/crates/title_bar/src/title_bar_settings.rs +++ b/crates/title_bar/src/title_bar_settings.rs @@ -8,6 +8,7 @@ pub struct TitleBarSettings { pub show_branch_name: bool, pub show_project_items: bool, pub show_sign_in: bool, + pub show_user_menu: bool, pub show_menus: bool, } @@ -21,6 +22,7 @@ impl Settings for TitleBarSettings { show_branch_name: content.show_branch_name.unwrap(), show_project_items: content.show_project_items.unwrap(), show_sign_in: content.show_sign_in.unwrap(), + show_user_menu: content.show_user_menu.unwrap(), show_menus: content.show_menus.unwrap(), } } diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index 122aa9f22b74c33dd8f148f2bf3b65f04da478a9..03c152e3fd3df0c62ab2f5c7e4a4746875ac955a 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -124,7 +124,7 @@ impl ActiveToolchain { &buffer, window, |this, _, event: &BufferEvent, window, cx| { - if matches!(event, BufferEvent::LanguageChanged) { + if matches!(event, BufferEvent::LanguageChanged(_)) { this._update_toolchain_task = Self::spawn_tracker_task(window, cx); } }, diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index c017483a32325d13e85a5db34566a3b0bf6e15a5..f7262c248f15f0f68fcd7a903ee01cac6b22d0af 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -128,67 +128,61 @@ impl AddToolchainState { ) -> (OpenPathDelegate, oneshot::Receiver>>) { let (tx, rx) = oneshot::channel(); let weak = cx.weak_entity(); - let path_style = project.read(cx).path_style(cx); - let lister = - OpenPathDelegate::new(tx, DirectoryLister::Project(project), false, path_style) - .show_hidden() - .with_footer(Arc::new(move |_, cx| { - let error = weak - .read_with(cx, |this, _| { - if let AddState::Path { error, .. } = &this.state { - error.clone() - } else { - None + let lister = OpenPathDelegate::new(tx, DirectoryLister::Project(project), false, cx) + .show_hidden() + .with_footer(Arc::new(move |_, cx| { + let error = weak + .read_with(cx, |this, _| { + if let AddState::Path { error, .. } = &this.state { + error.clone() + } else { + None + } + }) + .ok() + .flatten(); + let is_loading = weak + .read_with(cx, |this, _| { + matches!( + this.state, + AddState::Path { + input_state: PathInputState::Resolving(_), + .. } - }) - .ok() - .flatten(); - let is_loading = weak - .read_with(cx, |this, _| { - matches!( - this.state, - AddState::Path { - input_state: PathInputState::Resolving(_), - .. 
- } - ) - }) - .unwrap_or_default(); - Some( - v_flex() - .child(Divider::horizontal()) - .child( - h_flex() - .p_1() - .justify_between() - .gap_2() - .child( - Label::new("Select Toolchain Path") - .color(Color::Muted) - .map(|this| { - if is_loading { - this.with_animation( - "select-toolchain-label", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between( - 0.4, 0.8, - )), - |label, delta| label.alpha(delta), - ) - .into_any() - } else { - this.into_any_element() - } - }), - ) - .when_some(error, |this, error| { - this.child(Label::new(error).color(Color::Error)) - }), - ) - .into_any(), - ) - })); + ) + }) + .unwrap_or_default(); + Some( + v_flex() + .child(Divider::horizontal()) + .child( + h_flex() + .p_1() + .justify_between() + .gap_2() + .child(Label::new("Select Toolchain Path").color(Color::Muted).map( + |this| { + if is_loading { + this.with_animation( + "select-toolchain-label", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.alpha(delta), + ) + .into_any() + } else { + this.into_any_element() + } + }, + )) + .when_some(error, |this, error| { + this.child(Label::new(error).color(Color::Error)) + }), + ) + .into_any(), + ) + })); (lister, rx) } @@ -231,7 +225,7 @@ impl AddToolchainState { ); }); *input_state = Self::wait_for_path(rx, window, cx); - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); } }); return Err(anyhow::anyhow!("Failed to resolve toolchain")); @@ -266,7 +260,7 @@ impl AddToolchainState { toolchain, scope_picker, }; - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); }); Result::<_, anyhow::Error>::Ok(()) @@ -339,7 +333,7 @@ impl AddToolchainState { }); _ = self.weak.update(cx, |this, cx| { this.state = State::Search((this.create_search_state)(window, cx)); - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); cx.notify(); }); } @@ -389,7 +383,7 @@ impl Render for AddToolchainState { &weak, |this: &mut ToolchainSelector, _: &menu::Cancel, window, cx| { this.state = State::Search((this.create_search_state)(window, cx)); - this.state.focus_handle(cx).focus(window); + this.state.focus_handle(cx).focus(window, cx); cx.notify(); }, )) @@ -588,19 +582,20 @@ impl ToolchainSelector { .worktree_for_id(worktree_id, cx)? .read(cx) .abs_path(); - let workspace_id = workspace.database_id()?; let weak = workspace.weak_handle(); cx.spawn_in(window, async move |workspace, cx| { - let active_toolchain = workspace::WORKSPACE_DB - .toolchain( - workspace_id, - worktree_id, - relative_path.clone(), - language_name.clone(), - ) - .await - .ok() - .flatten(); + let active_toolchain = project + .read_with(cx, |this, cx| { + this.active_toolchain( + ProjectPath { + worktree_id, + path: relative_path.clone(), + }, + language_name.clone(), + cx, + ) + })? 
+ .await; workspace .update_in(cx, |this, window, cx| { this.toggle_modal(window, cx, move |window, cx| { @@ -618,6 +613,7 @@ impl ToolchainSelector { }); }) .ok(); + anyhow::Ok(()) }) .detach(); @@ -707,7 +703,7 @@ impl ToolchainSelector { window, cx, )); - self.state.focus_handle(cx).focus(window); + self.state.focus_handle(cx).focus(window, cx); cx.notify(); } } @@ -876,7 +872,7 @@ impl ToolchainSelectorDelegate { .strip_prefix(&worktree_root) .ok() .and_then(|suffix| suffix.to_str()) - .map(|suffix| format!(".{}{suffix}", path_style.separator()).into()) + .map(|suffix| format!(".{}{suffix}", path_style.primary_separator()).into()) .unwrap_or(path) } } diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index fae444c0ef81d0f7b631769112f4286f8e75ea23..c08e46c5882cf3c9e0a8e205c8b23224d3a7a8e1 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -1,5 +1,5 @@ +mod ai; mod avatar; -mod badge; mod banner; mod button; mod callout; @@ -7,6 +7,7 @@ mod chip; mod content_group; mod context_menu; mod data_table; +mod diff_stat; mod disclosure; mod divider; mod dropdown_menu; @@ -35,6 +36,7 @@ mod stack; mod sticky_items; mod tab; mod tab_bar; +mod thread_item; mod toggle; mod tooltip; mod tree_view_item; @@ -42,8 +44,8 @@ mod tree_view_item; #[cfg(feature = "stories")] mod stories; +pub use ai::*; pub use avatar::*; -pub use badge::*; pub use banner::*; pub use button::*; pub use callout::*; @@ -51,6 +53,7 @@ pub use chip::*; pub use content_group::*; pub use context_menu::*; pub use data_table::*; +pub use diff_stat::*; pub use disclosure::*; pub use divider::*; pub use dropdown_menu::*; @@ -79,6 +82,7 @@ pub use stack::*; pub use sticky_items::*; pub use tab::*; pub use tab_bar::*; +pub use thread_item::*; pub use toggle::*; pub use tooltip::*; pub use tree_view_item::*; diff --git a/crates/ui/src/components/ai.rs b/crates/ui/src/components/ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..e36361b7b06559c1442b86acf26b6694bb950d82 --- /dev/null +++ b/crates/ui/src/components/ai.rs @@ -0,0 +1,3 @@ +mod configured_api_card; + +pub use configured_api_card::*; diff --git a/crates/language_models/src/ui/configured_api_card.rs b/crates/ui/src/components/ai/configured_api_card.rs similarity index 84% rename from crates/language_models/src/ui/configured_api_card.rs rename to crates/ui/src/components/ai/configured_api_card.rs index 063ac1717f3aa5de1a448e26c94df7530fec588f..37f9ac7602d676906565a911f1bbca6d2b40f755 100644 --- a/crates/language_models/src/ui/configured_api_card.rs +++ b/crates/ui/src/components/ai/configured_api_card.rs @@ -1,10 +1,11 @@ +use crate::{Tooltip, prelude::*}; use gpui::{ClickEvent, IntoElement, ParentElement, SharedString}; -use ui::{Tooltip, prelude::*}; #[derive(IntoElement)] pub struct ConfiguredApiCard { label: SharedString, button_label: Option, + button_tab_index: Option, tooltip_label: Option, disabled: bool, on_click: Option>, @@ -15,6 +16,7 @@ impl ConfiguredApiCard { Self { label: label.into(), button_label: None, + button_tab_index: None, tooltip_label: None, disabled: false, on_click: None, @@ -43,6 +45,11 @@ impl ConfiguredApiCard { self.disabled = disabled; self } + + pub fn button_tab_index(mut self, tab_index: isize) -> Self { + self.button_tab_index = Some(tab_index); + self + } } impl RenderOnce for ConfiguredApiCard { @@ -51,23 +58,27 @@ impl RenderOnce for ConfiguredApiCard { let button_id = SharedString::new(format!("id-{}", button_label)); h_flex() + .min_w_0() .mt_0p5() .p_1() 
.justify_between() .rounded_md() + .flex_wrap() .border_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().background) .child( h_flex() - .flex_1() .min_w_0() .gap_1() .child(Icon::new(IconName::Check).color(Color::Success)) - .child(Label::new(self.label).truncate()), + .child(Label::new(self.label)), ) .child( Button::new(button_id, button_label) + .when_some(self.button_tab_index, |elem, tab_index| { + elem.tab_index(tab_index) + }) .label_size(LabelSize::Small) .icon(IconName::Undo) .icon_size(IconSize::Small) diff --git a/crates/languages/src/tsx/highlights-jsx.scm b/crates/ui/src/components/ai/copilot_configuration_callout.rs similarity index 100% rename from crates/languages/src/tsx/highlights-jsx.scm rename to crates/ui/src/components/ai/copilot_configuration_callout.rs diff --git a/crates/ui/src/components/avatar.rs b/crates/ui/src/components/avatar.rs index 19f7c4660bc64e756950df6f5ab0e19192f4096b..7b2ba8ce5cbfee2589695c1a9d0dcd61a266b093 100644 --- a/crates/ui/src/components/avatar.rs +++ b/crates/ui/src/components/avatar.rs @@ -91,7 +91,18 @@ impl RenderOnce for Avatar { self.image .size(image_size) .rounded_full() - .bg(cx.theme().colors().ghost_element_background), + .bg(cx.theme().colors().element_disabled) + .with_fallback(|| { + h_flex() + .size_full() + .justify_center() + .child( + Icon::new(IconName::Person) + .color(Color::Muted) + .size(IconSize::Small), + ) + .into_any_element() + }), ) .children(self.indicator.map(|indicator| div().child(indicator))) } diff --git a/crates/ui/src/components/badge.rs b/crates/ui/src/components/badge.rs deleted file mode 100644 index 9db6fd616f56769b03d1856cfda3fdeef66e446f..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/badge.rs +++ /dev/null @@ -1,94 +0,0 @@ -use std::rc::Rc; - -use crate::Divider; -use crate::DividerColor; -use crate::Tooltip; -use crate::component_prelude::*; -use crate::prelude::*; -use gpui::AnyView; -use gpui::{AnyElement, IntoElement, SharedString, Window}; - -#[derive(IntoElement, RegisterComponent)] -pub struct Badge { - label: SharedString, - icon: IconName, - tooltip: Option AnyView>>, -} - -impl Badge { - pub fn new(label: impl Into) -> Self { - Self { - label: label.into(), - icon: IconName::Check, - tooltip: None, - } - } - - pub fn icon(mut self, icon: IconName) -> Self { - self.icon = icon; - self - } - - pub fn tooltip(mut self, tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static) -> Self { - self.tooltip = Some(Rc::new(tooltip)); - self - } -} - -impl RenderOnce for Badge { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let tooltip = self.tooltip; - - h_flex() - .id(self.label.clone()) - .h_full() - .gap_1() - .pl_1() - .pr_2() - .border_1() - .border_color(cx.theme().colors().border.opacity(0.6)) - .bg(cx.theme().colors().element_background) - .rounded_sm() - .overflow_hidden() - .child( - Icon::new(self.icon) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child(Divider::vertical().color(DividerColor::Border)) - .child(Label::new(self.label.clone()).size(LabelSize::Small).ml_1()) - .when_some(tooltip, |this, tooltip| { - this.hoverable_tooltip(move |window, cx| tooltip(window, cx)) - }) - } -} - -impl Component for Badge { - fn scope() -> ComponentScope { - ComponentScope::DataDisplay - } - - fn description() -> Option<&'static str> { - Some( - "A compact, labeled component with optional icon for displaying status, categories, or metadata.", - ) - } - - fn preview(_window: &mut Window, _cx: &mut App) -> 
Option { - Some( - v_flex() - .gap_6() - .child(single_example( - "Basic Badge", - Badge::new("Default").into_any_element(), - )) - .child(single_example( - "With Tooltip", - Badge::new("Tooltip") - .tooltip(Tooltip::text("This is a tooltip.")) - .into_any_element(), - )) - .into_any_element(), - ) - } -} diff --git a/crates/ui/src/components/button.rs b/crates/ui/src/components/button.rs index 23e7702f6241b6ca0d4074936ee20da26531fbed..d56a9c09d3b57ba607b6837b16af31d240e58663 100644 --- a/crates/ui/src/components/button.rs +++ b/crates/ui/src/components/button.rs @@ -1,12 +1,14 @@ mod button; mod button_icon; mod button_like; +mod button_link; mod icon_button; mod split_button; mod toggle_button; pub use button::*; pub use button_like::*; +pub use button_link::*; pub use icon_button::*; pub use split_button::*; pub use toggle_button::*; diff --git a/crates/ui/src/components/button/button_link.rs b/crates/ui/src/components/button/button_link.rs new file mode 100644 index 0000000000000000000000000000000000000000..caffe2772bce394be6899b1f9b3b686c3927a530 --- /dev/null +++ b/crates/ui/src/components/button/button_link.rs @@ -0,0 +1,102 @@ +use gpui::{IntoElement, Window, prelude::*}; + +use crate::{ButtonLike, prelude::*}; + +/// A button that takes an underline to look like a regular web link. +/// It also contains an arrow icon to communicate the link takes you out of Zed. +/// +/// # Usage Example +/// +/// ``` +/// use ui::ButtonLink; +/// +/// let button_link = ButtonLink::new("Click me", "https://example.com"); +/// ``` +#[derive(IntoElement, RegisterComponent)] +pub struct ButtonLink { + label: SharedString, + label_size: LabelSize, + label_color: Color, + link: String, + no_icon: bool, +} + +impl ButtonLink { + pub fn new(label: impl Into, link: impl Into) -> Self { + Self { + link: link.into(), + label: label.into(), + label_size: LabelSize::Default, + label_color: Color::Default, + no_icon: false, + } + } + + pub fn no_icon(mut self, no_icon: bool) -> Self { + self.no_icon = no_icon; + self + } + + pub fn label_size(mut self, label_size: LabelSize) -> Self { + self.label_size = label_size; + self + } + + pub fn label_color(mut self, label_color: Color) -> Self { + self.label_color = label_color; + self + } +} + +impl RenderOnce for ButtonLink { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let id = format!("{}-{}", self.label, self.link); + + ButtonLike::new(id) + .size(ButtonSize::None) + .child( + h_flex() + .gap_0p5() + .child( + Label::new(self.label) + .size(self.label_size) + .color(self.label_color) + .underline(), + ) + .when(!self.no_icon, |this| { + this.child( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Small) + .color(Color::Muted), + ) + }), + ) + .on_click(move |_, _, cx| cx.open_url(&self.link)) + .into_any_element() + } +} + +impl Component for ButtonLink { + fn scope() -> ComponentScope { + ComponentScope::Navigation + } + + fn description() -> Option<&'static str> { + Some("A button that opens a URL.") + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + Some( + v_flex() + .gap_6() + .child( + example_group(vec![single_example( + "Simple", + ButtonLink::new("zed.dev", "https://zed.dev").into_any_element(), + )]) + .vertical(), + ) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/button/split_button.rs b/crates/ui/src/components/button/split_button.rs index 14b9fd153cd5ad662467c75ff81700587667cee3..48f06ff3789e69b6d19cde2322932f4bd6e89f97 100644 --- 
a/crates/ui/src/components/button/split_button.rs +++ b/crates/ui/src/components/button/split_button.rs @@ -4,7 +4,7 @@ use gpui::{ }; use theme::ActiveTheme; -use crate::{ElevationIndex, h_flex}; +use crate::{ElevationIndex, IconButton, h_flex}; use super::ButtonLike; @@ -15,6 +15,23 @@ pub enum SplitButtonStyle { Transparent, } +pub enum SplitButtonKind { + ButtonLike(ButtonLike), + IconButton(IconButton), +} + +impl From for SplitButtonKind { + fn from(icon_button: IconButton) -> Self { + Self::IconButton(icon_button) + } +} + +impl From for SplitButtonKind { + fn from(button_like: ButtonLike) -> Self { + Self::ButtonLike(button_like) + } +} + /// /// A button with two parts: a primary action on the left and a secondary action on the right. /// /// The left side is a [`ButtonLike`] with the main action, while the right side can contain @@ -23,15 +40,15 @@ pub enum SplitButtonStyle { /// The two sections are visually separated by a divider, but presented as a unified control. #[derive(IntoElement)] pub struct SplitButton { - pub left: ButtonLike, - pub right: AnyElement, + left: SplitButtonKind, + right: AnyElement, style: SplitButtonStyle, } impl SplitButton { - pub fn new(left: ButtonLike, right: AnyElement) -> Self { + pub fn new(left: impl Into, right: AnyElement) -> Self { Self { - left, + left: left.into(), right, style: SplitButtonStyle::Filled, } @@ -56,7 +73,10 @@ impl RenderOnce for SplitButton { this.border_1() .border_color(cx.theme().colors().border.opacity(0.8)) }) - .child(div().flex_grow().child(self.left)) + .child(div().flex_grow().child(match self.left { + SplitButtonKind::ButtonLike(button) => button.into_any_element(), + SplitButtonKind::IconButton(icon) => icon.into_any_element(), + })) .child( div() .h_full() diff --git a/crates/ui/src/components/button/toggle_button.rs b/crates/ui/src/components/button/toggle_button.rs index 2a3db701d15d12361ebe623d8d56fa35ae0016a7..5cecfef0625648d9fc5cc3a2b143a0ea114b3def 100644 --- a/crates/ui/src/components/button/toggle_button.rs +++ b/crates/ui/src/components/button/toggle_button.rs @@ -2,7 +2,7 @@ use std::rc::Rc; use gpui::{AnyView, ClickEvent, relative}; -use crate::{ButtonLike, ButtonLikeRounding, ElevationIndex, TintColor, Tooltip, prelude::*}; +use crate::{ButtonLike, ButtonLikeRounding, TintColor, Tooltip, prelude::*}; /// The position of a [`ToggleButton`] within a group of buttons. 
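Since `SplitButton::new` now takes `impl Into<SplitButtonKind>`, the left half no longer has to be a `ButtonLike`; the new `From` impls accept an `IconButton` directly. A minimal sketch under that assumption — the ids and icons are placeholders, and the imports assume these items are re-exported from the crate root like the other components in this diff:

```rust
use crate::{IconButton, IconName, SplitButton, prelude::*};

// The left side is an IconButton, converted into SplitButtonKind::IconButton by
// the new From<IconButton> impl; previously `SplitButton::new` required a
// ButtonLike here. The right side is still passed as an AnyElement.
fn icon_split_button() -> SplitButton {
    SplitButton::new(
        IconButton::new("primary-action", IconName::Check),
        IconButton::new("secondary-action", IconName::ChevronRight).into_any_element(),
    )
}
```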
#[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -43,290 +43,6 @@ impl ToggleButtonPosition { } } -#[derive(IntoElement, RegisterComponent)] -pub struct ToggleButton { - base: ButtonLike, - position_in_group: Option, - label: SharedString, - label_color: Option, -} - -impl ToggleButton { - pub fn new(id: impl Into, label: impl Into) -> Self { - Self { - base: ButtonLike::new(id), - position_in_group: None, - label: label.into(), - label_color: None, - } - } - - pub fn color(mut self, label_color: impl Into>) -> Self { - self.label_color = label_color.into(); - self - } - - pub fn position_in_group(mut self, position: ToggleButtonPosition) -> Self { - self.position_in_group = Some(position); - self - } - - pub fn first(self) -> Self { - self.position_in_group(ToggleButtonPosition::HORIZONTAL_FIRST) - } - - pub fn middle(self) -> Self { - self.position_in_group(ToggleButtonPosition::HORIZONTAL_MIDDLE) - } - - pub fn last(self) -> Self { - self.position_in_group(ToggleButtonPosition::HORIZONTAL_LAST) - } -} - -impl Toggleable for ToggleButton { - fn toggle_state(mut self, selected: bool) -> Self { - self.base = self.base.toggle_state(selected); - self - } -} - -impl SelectableButton for ToggleButton { - fn selected_style(mut self, style: ButtonStyle) -> Self { - self.base.selected_style = Some(style); - self - } -} - -impl FixedWidth for ToggleButton { - fn width(mut self, width: impl Into) -> Self { - self.base.width = Some(width.into()); - self - } - - fn full_width(mut self) -> Self { - self.base.width = Some(relative(1.)); - self - } -} - -impl Disableable for ToggleButton { - fn disabled(mut self, disabled: bool) -> Self { - self.base = self.base.disabled(disabled); - self - } -} - -impl Clickable for ToggleButton { - fn on_click(mut self, handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static) -> Self { - self.base = self.base.on_click(handler); - self - } - - fn cursor_style(mut self, cursor_style: gpui::CursorStyle) -> Self { - self.base = self.base.cursor_style(cursor_style); - self - } -} - -impl ButtonCommon for ToggleButton { - fn id(&self) -> &ElementId { - self.base.id() - } - - fn style(mut self, style: ButtonStyle) -> Self { - self.base = self.base.style(style); - self - } - - fn size(mut self, size: ButtonSize) -> Self { - self.base = self.base.size(size); - self - } - - fn tooltip(mut self, tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static) -> Self { - self.base = self.base.tooltip(tooltip); - self - } - - fn tab_index(mut self, tab_index: impl Into) -> Self { - self.base = self.base.tab_index(tab_index); - self - } - - fn layer(mut self, elevation: ElevationIndex) -> Self { - self.base = self.base.layer(elevation); - self - } - - fn track_focus(mut self, focus_handle: &gpui::FocusHandle) -> Self { - self.base = self.base.track_focus(focus_handle); - self - } -} - -impl RenderOnce for ToggleButton { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { - let is_disabled = self.base.disabled; - let is_selected = self.base.selected; - - let label_color = if is_disabled { - Color::Disabled - } else if is_selected { - Color::Selected - } else { - self.label_color.unwrap_or_default() - }; - - self.base - .when_some(self.position_in_group, |this, position| { - this.rounding(position.to_rounding()) - }) - .child( - Label::new(self.label) - .color(label_color) - .line_height_style(LineHeightStyle::UiLabel), - ) - } -} - -impl Component for ToggleButton { - fn scope() -> ComponentScope { - ComponentScope::Input - } - - fn sort_name() -> 
&'static str { - "ButtonC" - } - - fn preview(_window: &mut Window, _cx: &mut App) -> Option { - Some( - v_flex() - .gap_6() - .children(vec![ - example_group_with_title( - "Button Styles", - vec![ - single_example( - "Off", - ToggleButton::new("off", "Off") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .into_any_element(), - ), - single_example( - "On", - ToggleButton::new("on", "On") - .layer(ElevationIndex::Background) - .toggle_state(true) - .style(ButtonStyle::Filled) - .into_any_element(), - ), - single_example( - "Off – Disabled", - ToggleButton::new("disabled_off", "Disabled Off") - .layer(ElevationIndex::Background) - .disabled(true) - .style(ButtonStyle::Filled) - .into_any_element(), - ), - single_example( - "On – Disabled", - ToggleButton::new("disabled_on", "Disabled On") - .layer(ElevationIndex::Background) - .disabled(true) - .toggle_state(true) - .style(ButtonStyle::Filled) - .into_any_element(), - ), - ], - ), - example_group_with_title( - "Button Group", - vec![ - single_example( - "Three Buttons", - h_flex() - .child( - ToggleButton::new("three_btn_first", "First") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .first() - .into_any_element(), - ) - .child( - ToggleButton::new("three_btn_middle", "Middle") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .middle() - .toggle_state(true) - .into_any_element(), - ) - .child( - ToggleButton::new("three_btn_last", "Last") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .last() - .into_any_element(), - ) - .into_any_element(), - ), - single_example( - "Two Buttons", - h_flex() - .child( - ToggleButton::new("two_btn_first", "First") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .first() - .into_any_element(), - ) - .child( - ToggleButton::new("two_btn_last", "Last") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .last() - .into_any_element(), - ) - .into_any_element(), - ), - ], - ), - example_group_with_title( - "Alternate Sizes", - vec![ - single_example( - "None", - ToggleButton::new("none", "None") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .size(ButtonSize::None) - .into_any_element(), - ), - single_example( - "Compact", - ToggleButton::new("compact", "Compact") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .size(ButtonSize::Compact) - .into_any_element(), - ), - single_example( - "Large", - ToggleButton::new("large", "Large") - .layer(ElevationIndex::Background) - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .into_any_element(), - ), - ], - ), - ]) - .into_any_element(), - ) - } -} - pub struct ButtonConfiguration { label: SharedString, icon: Option, @@ -447,6 +163,8 @@ pub enum ToggleButtonGroupStyle { pub enum ToggleButtonGroupSize { Default, Medium, + Large, + Custom(Rems), } #[derive(IntoElement)] @@ -458,7 +176,9 @@ where rows: [[T; COLS]; ROWS], style: ToggleButtonGroupStyle, size: ToggleButtonGroupSize, + label_size: LabelSize, group_width: Option, + auto_width: bool, selected_index: usize, tab_index: Option, } @@ -470,7 +190,9 @@ impl ToggleButtonGroup { rows: [buttons], style: ToggleButtonGroupStyle::Transparent, size: ToggleButtonGroupSize::Default, + label_size: LabelSize::Small, group_width: None, + auto_width: false, selected_index: 0, tab_index: None, } @@ -488,7 +210,9 @@ impl ToggleButtonGroup { rows: [first_row, second_row], style: ToggleButtonGroupStyle::Transparent, size: ToggleButtonGroupSize::Default, + 
label_size: LabelSize::Small, group_width: None, + auto_width: false, selected_index: 0, tab_index: None, } @@ -511,6 +235,18 @@ impl ToggleButtonGroup Self { + self.auto_width = true; + self + } + + pub fn label_size(mut self, label_size: LabelSize) -> Self { + self.label_size = label_size; + self + } + /// Sets the tab index for the toggle button group. /// The tab index is set to the initial value provided, then the /// value is incremented by the number of buttons in the group. @@ -543,6 +279,11 @@ impl RenderOnce for ToggleButtonGroup { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let custom_height = match self.size { + ToggleButtonGroupSize::Custom(height) => Some(height), + _ => None, + }; + let entries = self.rows.into_iter().enumerate().map(|(row_index, row)| { let group_name = self.group_name.clone(); @@ -558,7 +299,7 @@ impl RenderOnce let entry_index = row_index * COLS + col_index; ButtonLike::new((group_name.clone(), entry_index)) - .full_width() + .when(!self.auto_width, |this| this.full_width()) .rounding(Some( ToggleButtonPosition { leftmost: col_index == 0, @@ -581,13 +322,17 @@ impl RenderOnce .when(self.size == ToggleButtonGroupSize::Medium, |button| { button.size(ButtonSize::Medium) }) + .when(self.size == ToggleButtonGroupSize::Large, |button| { + button.size(ButtonSize::Large) + }) + .when_some(custom_height, |button, height| button.height(height.into())) .child( h_flex() .w_full() + .px_2() .gap_1p5() - .px_3() - .py_1() .justify_center() + .flex_none() .when_some(icon, |this, icon| { this.py_2() .child(Icon::new(icon).size(IconSize::XSmall).map(|this| { @@ -598,7 +343,7 @@ impl RenderOnce } })) }) - .child(Label::new(label).size(LabelSize::Small).when( + .child(Label::new(label).size(self.label_size).when( entry_index == self.selected_index || selected, |this| this.color(Color::Accent), )), @@ -620,6 +365,8 @@ impl RenderOnce .map(|this| { if let Some(width) = self.group_width { this.w(width) + } else if self.auto_width { + this } else { this.w_full() } @@ -646,7 +393,7 @@ impl RenderOnce .when(is_outlined_or_filled && !last_item, |this| { this.border_r_1().border_color(border_color) }) - .w(Self::button_width()) + .when(!self.auto_width, |this| this.w(Self::button_width())) .overflow_hidden() .child(item) })) diff --git a/crates/ui/src/components/callout.rs b/crates/ui/src/components/callout.rs index b5d1d7f25531cc956388da9d4a977bdfd14204b9..4eb849d7f640aca78b70645f5f93301281ca6627 100644 --- a/crates/ui/src/components/callout.rs +++ b/crates/ui/src/components/callout.rs @@ -30,6 +30,7 @@ pub struct Callout { icon: Option, title: Option, description: Option, + description_slot: Option, actions_slot: Option, dismiss_action: Option, line_height: Option, @@ -44,6 +45,7 @@ impl Callout { icon: None, title: None, description: None, + description_slot: None, actions_slot: None, dismiss_action: None, line_height: None, @@ -76,6 +78,13 @@ impl Callout { self } + /// Allows for any element—like markdown elements—to fill the description slot of the callout. + /// This method wins over `description` if both happen to be set. + pub fn description_slot(mut self, description: impl IntoElement) -> Self { + self.description_slot = Some(description.into_any_element()); + self + } + /// Sets the primary call-to-action button. 
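The new `Callout::description_slot` takes any element and, as noted above, wins over the plain `description` string when both are set. A minimal sketch — `Callout::new()` and `.title(...)` are assumed from the surrounding fields rather than shown in this hunk, and the labels, ids, and URL are placeholders:

```rust
use crate::{Button, ButtonLink, Callout, prelude::*};

// Fill the description area with arbitrary elements (a muted label plus a
// ButtonLink) instead of a plain string passed to `description`.
fn workspace_callout() -> Callout {
    Callout::new()
        .title("Untrusted workspace")
        .description_slot(
            h_flex()
                .gap_0p5()
                .child(Label::new("Review this project's settings on").color(Color::Muted))
                .child(ButtonLink::new("zed.dev", "https://zed.dev").label_size(LabelSize::Small)),
        )
        .actions_slot(Button::new("review-settings", "Review Settings"))
}
```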
pub fn actions_slot(mut self, action: impl IntoElement) -> Self { self.actions_slot = Some(action.into_any_element()); @@ -179,15 +188,27 @@ impl RenderOnce for Callout { ) }), ) - .when_some(self.description, |this, description| { - this.child( - div() - .w_full() - .flex_1() - .text_ui_sm(cx) - .text_color(cx.theme().colors().text_muted) - .child(description), - ) + .map(|this| { + if let Some(description_slot) = self.description_slot { + this.child( + div() + .w_full() + .flex_1() + .text_ui_sm(cx) + .child(description_slot), + ) + } else if let Some(description) = self.description { + this.child( + div() + .w_full() + .flex_1() + .text_ui_sm(cx) + .text_color(cx.theme().colors().text_muted) + .child(description), + ) + } else { + this + } }), ) } diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index a4bae647408f860ec8425266a26efc173099f225..756a2a9364193d6f1cdace8ed8c92cecf401a864 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -562,7 +562,7 @@ impl ContextMenu { action: Some(action.boxed_clone()), handler: Rc::new(move |context, window, cx| { if let Some(context) = &context { - window.focus(context); + window.focus(context, cx); } window.dispatch_action(action.boxed_clone(), cx); }), @@ -594,7 +594,7 @@ impl ContextMenu { action: Some(action.boxed_clone()), handler: Rc::new(move |context, window, cx| { if let Some(context) = &context { - window.focus(context); + window.focus(context, cx); } window.dispatch_action(action.boxed_clone(), cx); }), diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 4a1f4939cca2eb85bb7a549d06af1e9ea8cf04d0..9cd2a5cb7a0d802d170fcfbe6a812027c779d942 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -485,6 +485,7 @@ pub struct Table { interaction_state: Option>, col_widths: Option>, map_row: Option), &mut Window, &mut App) -> AnyElement>>, + use_ui_font: bool, empty_table_callback: Option AnyElement>>, } @@ -498,6 +499,7 @@ impl Table { rows: TableContents::Vec(Vec::new()), interaction_state: None, map_row: None, + use_ui_font: true, empty_table_callback: None, col_widths: None, } @@ -590,6 +592,11 @@ impl Table { self } + pub fn no_ui_font(mut self) -> Self { + self.use_ui_font = false; + self + } + pub fn map_row( mut self, callback: impl Fn((usize, Stateful
), &mut Window, &mut App) -> AnyElement + 'static, @@ -618,8 +625,8 @@ fn base_cell_style(width: Option) -> Div { .overflow_hidden() } -fn base_cell_style_text(width: Option, cx: &App) -> Div { - base_cell_style(width).text_ui(cx) +fn base_cell_style_text(width: Option, use_ui_font: bool, cx: &App) -> Div { + base_cell_style(width).when(use_ui_font, |el| el.text_ui(cx)) } pub fn render_table_row( @@ -641,11 +648,10 @@ pub fn render_table_row( .map_or([None; COLS], |widths| widths.map(Some)); let mut row = h_flex() - .h_full() .id(("table_row", row_index)) - .w_full() - .justify_between() + .size_full() .when_some(bg, |row, bg| row.bg(bg)) + .hover(|s| s.bg(cx.theme().colors().element_hover.opacity(0.6))) .when(!is_striped, |row| { row.border_b_1() .border_color(transparent_black()) @@ -657,7 +663,12 @@ pub fn render_table_row( .map(IntoElement::into_any_element) .into_iter() .zip(column_widths) - .map(|(cell, width)| base_cell_style_text(width, cx).px_1().py_0p5().child(cell)), + .map(|(cell, width)| { + base_cell_style_text(width, table_context.use_ui_font, cx) + .px_1() + .py_0p5() + .child(cell) + }), ); let row = if let Some(map_row) = table_context.map_row { @@ -701,7 +712,7 @@ pub fn render_table_header( .border_color(cx.theme().colors().border) .children(headers.into_iter().enumerate().zip(column_widths).map( |((header_idx, h), width)| { - base_cell_style_text(width, cx) + base_cell_style_text(width, table_context.use_ui_font, cx) .child(h) .id(ElementId::NamedInteger( shared_element_id.clone(), @@ -740,6 +751,7 @@ pub struct TableRenderContext { pub total_row_count: usize, pub column_widths: Option<[Length; COLS]>, pub map_row: Option), &mut Window, &mut App) -> AnyElement>>, + pub use_ui_font: bool, } impl TableRenderContext { @@ -749,6 +761,7 @@ impl TableRenderContext { total_row_count: table.rows.len(), column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)), map_row: table.map_row.clone(), + use_ui_font: table.use_ui_font, } } } @@ -873,7 +886,7 @@ impl RenderOnce for Table { interaction_state.as_ref(), |this, state| { this.track_scroll( - state.read_with(cx, |s, _| s.scroll_handle.clone()), + &state.read_with(cx, |s, _| s.scroll_handle.clone()), ) }, ), @@ -907,7 +920,7 @@ impl RenderOnce for Table { .unwrap_or_else(|| Scrollbars::new(super::ScrollAxes::Both)); content .custom_scrollbars( - scrollbars.tracked_scroll_handle(state.read(cx).scroll_handle.clone()), + scrollbars.tracked_scroll_handle(&state.read(cx).scroll_handle), window, cx, ) diff --git a/crates/ui/src/components/diff_stat.rs b/crates/ui/src/components/diff_stat.rs new file mode 100644 index 0000000000000000000000000000000000000000..2606963555c682d9d949d19d57471e02c53351d7 --- /dev/null +++ b/crates/ui/src/components/diff_stat.rs @@ -0,0 +1,85 @@ +use crate::prelude::*; + +#[derive(IntoElement, RegisterComponent)] +pub struct DiffStat { + id: ElementId, + added: usize, + removed: usize, +} + +impl DiffStat { + pub fn new(id: impl Into, added: usize, removed: usize) -> Self { + Self { + id: id.into(), + added, + removed, + } + } +} + +impl RenderOnce for DiffStat { + fn render(self, _: &mut Window, _cx: &mut App) -> impl IntoElement { + h_flex() + .id(self.id) + .gap_1() + .child( + h_flex() + .gap_0p5() + .child( + Icon::new(IconName::Plus) + .size(IconSize::XSmall) + .color(Color::Success), + ) + .child( + Label::new(self.added.to_string()) + .color(Color::Success) + .size(LabelSize::Small), + ), + ) + .child( + h_flex() + .gap_0p5() + .child( + Icon::new(IconName::Dash) + 
.size(IconSize::XSmall) + .color(Color::Error), + ) + .child( + Label::new(self.removed.to_string()) + .color(Color::Error) + .size(LabelSize::Small), + ), + ) + } +} + +impl Component for DiffStat { + fn scope() -> ComponentScope { + ComponentScope::VersionControl + } + + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let container = || { + h_flex() + .py_4() + .w_72() + .justify_center() + .border_1() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().panel_background) + }; + + let diff_stat_example = vec![single_example( + "Default", + container() + .child(DiffStat::new("id", 1, 2)) + .into_any_element(), + )]; + + Some( + example_group(diff_stat_example) + .vertical() + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/divider.rs b/crates/ui/src/components/divider.rs index 98eb45fd1dc1845284d63952eac684790d73bec4..5ad2187cfae36f3cc45cbecb42f115f0742abed4 100644 --- a/crates/ui/src/components/divider.rs +++ b/crates/ui/src/components/divider.rs @@ -1,4 +1,4 @@ -use gpui::{Hsla, IntoElement}; +use gpui::{Hsla, IntoElement, PathBuilder, canvas, point}; use crate::prelude::*; @@ -59,15 +59,6 @@ pub struct Divider { inset: bool, } -impl RenderOnce for Divider { - fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { - match self.style { - DividerStyle::Solid => self.render_solid(cx).into_any_element(), - DividerStyle::Dashed => self.render_dashed(cx).into_any_element(), - } - } -} - impl Divider { pub fn horizontal() -> Self { Self { @@ -115,49 +106,60 @@ impl Divider { self } - pub fn render_solid(self, cx: &mut App) -> impl IntoElement { - div() - .map(|this| match self.direction { - DividerDirection::Horizontal => { - this.h_px().w_full().when(self.inset, |this| this.mx_1p5()) - } - DividerDirection::Vertical => { - this.w_px().h_full().when(self.inset, |this| this.my_1p5()) - } - }) - .bg(self.color.hsla(cx)) + pub fn render_solid(self, base: Div, cx: &mut App) -> impl IntoElement { + base.bg(self.color.hsla(cx)) } - // TODO: Use canvas or a shader here - // This obviously is a short term approach - pub fn render_dashed(self, cx: &mut App) -> impl IntoElement { - let segment_count = 128; - let segment_count_f = segment_count as f32; - let segment_min_w = 6.; + pub fn render_dashed(self, base: Div) -> impl IntoElement { + base.relative().child( + canvas( + |_, _, _| {}, + move |bounds, _, window, cx| { + let mut builder = PathBuilder::stroke(px(1.)).dash_array(&[px(4.), px(2.)]); + let (start, end) = match self.direction { + DividerDirection::Horizontal => { + let x = bounds.origin.x; + let y = bounds.origin.y + px(0.5); + (point(x, y), point(x + bounds.size.width, y)) + } + DividerDirection::Vertical => { + let x = bounds.origin.x + px(0.5); + let y = bounds.origin.y; + (point(x, y), point(x, y + bounds.size.height)) + } + }; + builder.move_to(start); + builder.line_to(end); + if let Ok(line) = builder.build() { + window.paint_path(line, self.color.hsla(cx)); + } + }, + ) + .absolute() + .size_full(), + ) + } +} + +impl RenderOnce for Divider { + fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { let base = match self.direction { - DividerDirection::Horizontal => h_flex(), - DividerDirection::Vertical => v_flex(), - }; - let (w, h) = match self.direction { - DividerDirection::Horizontal => (px(segment_min_w), px(1.)), - DividerDirection::Vertical => (px(1.), px(segment_min_w)), + DividerDirection::Horizontal => div() + .min_w_0() + .h_px() + .w_full() + .when(self.inset, |this| this.mx_1p5()), + 
DividerDirection::Vertical => div() + .min_w_0() + .w_px() + .h_full() + .when(self.inset, |this| this.my_1p5()), }; - let color = self.color.hsla(cx); - let total_min_w = segment_min_w * segment_count_f * 2.; // * 2 because of the gap - - base.min_w(px(total_min_w)) - .map(|this| { - if self.direction == DividerDirection::Horizontal { - this.w_full().h_px() - } else { - this.w_px().h_full() - } - }) - .gap(px(segment_min_w)) - .overflow_hidden() - .children( - (0..segment_count).map(|_| div().flex_grow().flex_shrink_0().w(w).h(h).bg(color)), - ) + + match self.style { + DividerStyle::Solid => self.render_solid(base, cx).into_any_element(), + DividerStyle::Dashed => self.render_dashed(base).into_any_element(), + } } } @@ -232,6 +234,7 @@ impl Component for Divider { vec![single_example( "Between Content", v_flex() + .w_full() .gap_4() .px_4() .child(Label::new("Section One")) diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index cc43db7904e4d9c6328c44c275d96cccbce7ec8c..1c8e36ec18d6184b38eb6772e8f5a13be181ae00 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -302,33 +302,43 @@ impl Component for Icon { .children(vec![ example_group_with_title( "Sizes", - vec![ - single_example("Default", Icon::new(IconName::Star).into_any_element()), - single_example( - "Small", - Icon::new(IconName::Star) - .size(IconSize::Small) - .into_any_element(), - ), - single_example( - "Large", - Icon::new(IconName::Star) - .size(IconSize::XLarge) - .into_any_element(), - ), - ], + vec![single_example( + "XSmall, Small, Default, Large", + h_flex() + .gap_1() + .child( + Icon::new(IconName::Star) + .size(IconSize::XSmall) + .into_any_element(), + ) + .child( + Icon::new(IconName::Star) + .size(IconSize::Small) + .into_any_element(), + ) + .child(Icon::new(IconName::Star).into_any_element()) + .child( + Icon::new(IconName::Star) + .size(IconSize::XLarge) + .into_any_element(), + ) + .into_any_element(), + )], ), example_group_with_title( "Colors", - vec![ - single_example("Default", Icon::new(IconName::Bell).into_any_element()), - single_example( - "Custom Color", - Icon::new(IconName::Bell) - .color(Color::Error) - .into_any_element(), - ), - ], + vec![single_example( + "Default & Custom", + h_flex() + .gap_1() + .child(Icon::new(IconName::Star).into_any_element()) + .child( + Icon::new(IconName::Star) + .color(Color::Error) + .into_any_element(), + ) + .into_any_element(), + )], ), example_group_with_title( "All Icons", @@ -341,14 +351,14 @@ impl Component for Icon { .children(::iter().map( |icon_name| { h_flex() + .p_1() .gap_1() .border_1() - .rounded_md() - .px_2() - .py_1() - .border_color(Color::Muted.color(cx)) - .child(SharedString::new_static(icon_name.into())) + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().element_disabled) + .rounded_sm() .child(Icon::new(icon_name).into_any_element()) + .child(SharedString::new_static(icon_name.into())) }, )) .into_any_element(), diff --git a/crates/ui/src/components/image.rs b/crates/ui/src/components/image.rs index 8a14cffd3b2de2e184fd87a9212775c470e3118d..3e8cbd8fff7ba484b1b1eb9fd50da55998d7eae9 100644 --- a/crates/ui/src/components/image.rs +++ b/crates/ui/src/components/image.rs @@ -115,6 +115,8 @@ impl Component for Vector { } fn preview(_window: &mut Window, _cx: &mut App) -> Option { + let size = rems_from_px(60.); + Some( v_flex() .gap_6() @@ -124,11 +126,18 @@ impl Component for Vector { vec![ single_example( "Default", - Vector::square(VectorName::ZedLogo, 
rems(8.)).into_any_element(), + Vector::square(VectorName::ZedLogo, size).into_any_element(), ), single_example( "Custom Size", - Vector::new(VectorName::ZedLogo, rems(12.), rems(6.)) + h_flex() + .h(rems_from_px(120.)) + .justify_center() + .child(Vector::new( + VectorName::ZedLogo, + rems_from_px(120.), + rems_from_px(200.), + )) .into_any_element(), ), ], @@ -138,13 +147,13 @@ impl Component for Vector { vec![ single_example( "Accent Color", - Vector::square(VectorName::ZedLogo, rems(8.)) + Vector::square(VectorName::ZedLogo, size) .color(Color::Accent) .into_any_element(), ), single_example( "Error Color", - Vector::square(VectorName::ZedLogo, rems(8.)) + Vector::square(VectorName::ZedLogo, size) .color(Color::Error) .into_any_element(), ), @@ -152,17 +161,11 @@ impl Component for Vector { ), example_group_with_title( "Different Vectors", - vec![ - single_example( - "Zed Logo", - Vector::square(VectorName::ZedLogo, rems(8.)).into_any_element(), - ), - single_example( - "Zed X Copilot", - Vector::square(VectorName::ZedXCopilot, rems(8.)) - .into_any_element(), - ), - ], + vec![single_example( + "Zed X Copilot", + Vector::square(VectorName::ZedXCopilot, rems_from_px(100.)) + .into_any_element(), + )], ), ]) .into_any_element(), diff --git a/crates/ui/src/components/keybinding_hint.rs b/crates/ui/src/components/keybinding_hint.rs index c998e29f0ed6f5bccab976b11080320d4d65a7dd..7c19953ca43c907070829f7140f97a4fde495b57 100644 --- a/crates/ui/src/components/keybinding_hint.rs +++ b/crates/ui/src/components/keybinding_hint.rs @@ -234,9 +234,7 @@ impl RenderOnce for KeybindingHint { let mut base = h_flex(); - base.text_style() - .get_or_insert_with(Default::default) - .font_style = Some(FontStyle::Italic); + base.text_style().font_style = Some(FontStyle::Italic); base.gap_1() .font_buffer(cx) diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index 1fa6b14c83d8359df234f33ecb9318c88e3a2714..31fb7bfd88f1343ac6145c86f228bdcbd6a22e10 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -223,11 +223,9 @@ impl RenderOnce for LabelLike { }) .when(self.italic, |this| this.italic()) .when(self.underline, |mut this| { - this.text_style() - .get_or_insert_with(Default::default) - .underline = Some(UnderlineStyle { + this.text_style().underline = Some(UnderlineStyle { thickness: px(1.), - color: None, + color: Some(cx.theme().colors().text_muted.opacity(0.4)), wavy: false, }); this diff --git a/crates/ui/src/components/label/spinner_label.rs b/crates/ui/src/components/label/spinner_label.rs index de88e9bb7ab04a3d595183513c2b00da70e172aa..33eeeae125106cd8c8d2db64605b7017121c0394 100644 --- a/crates/ui/src/components/label/spinner_label.rs +++ b/crates/ui/src/components/label/spinner_label.rs @@ -77,7 +77,7 @@ impl SpinnerLabel { let duration = variant.duration(); SpinnerLabel { - base: Label::new(frames[0]), + base: Label::new(frames[0]).color(Color::Muted), variant, frames, duration, @@ -164,7 +164,7 @@ impl RenderOnce for SpinnerLabel { let frames = self.frames.clone(); let duration = self.duration; - self.base.color(Color::Muted).with_animation( + self.base.with_animation( self.variant.animation_id(), Animation::new(duration).repeat(), move |mut label, delta| { diff --git a/crates/ui/src/components/list/list.rs b/crates/ui/src/components/list/list.rs index b6950f06a4449265cccd48f9f13590650619a01c..ccae5bed23d9509ea6d4989f84620d444499245b 100644 --- a/crates/ui/src/components/list/list.rs +++ 
b/crates/ui/src/components/list/list.rs @@ -1,14 +1,15 @@ +use component::{Component, ComponentScope, example_group_with_title, single_example}; use gpui::AnyElement; use smallvec::SmallVec; -use crate::{Label, ListHeader, prelude::*, v_flex}; +use crate::{Label, ListHeader, ListItem, prelude::*}; pub enum EmptyMessage { Text(SharedString), Element(AnyElement), } -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct List { /// Message to display when the list is empty /// Defaults to "No items" @@ -92,3 +93,50 @@ impl RenderOnce for List { }) } } + +impl Component for List { + fn scope() -> ComponentScope { + ComponentScope::Layout + } + + fn description() -> Option<&'static str> { + Some( + "A container component for displaying a collection of list items with optional header and empty state.", + ) + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + Some( + v_flex() + .gap_6() + .children(vec![example_group_with_title( + "Basic Lists", + vec![ + single_example( + "Simple List", + List::new() + .child(ListItem::new("item1").child(Label::new("Item 1"))) + .child(ListItem::new("item2").child(Label::new("Item 2"))) + .child(ListItem::new("item3").child(Label::new("Item 3"))) + .into_any_element(), + ), + single_example( + "With Header", + List::new() + .header(ListHeader::new("Section Header")) + .child(ListItem::new("item1").child(Label::new("Item 1"))) + .child(ListItem::new("item2").child(Label::new("Item 2"))) + .into_any_element(), + ), + single_example( + "Empty List", + List::new() + .empty_message("No items to display") + .into_any_element(), + ), + ], + )]) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/list/list_bullet_item.rs b/crates/ui/src/components/list/list_bullet_item.rs index 9ac2095b5757d90bd22496052b806f41a5f8d163..934f0853dbe18b8231e15073766b6c84c1896546 100644 --- a/crates/ui/src/components/list/list_bullet_item.rs +++ b/crates/ui/src/components/list/list_bullet_item.rs @@ -1,17 +1,33 @@ -use crate::{ListItem, prelude::*}; +use crate::{ButtonLink, ListItem, prelude::*}; +use component::{Component, ComponentScope, example_group, single_example}; use gpui::{IntoElement, ParentElement, SharedString}; -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct ListBulletItem { label: SharedString, + label_color: Option, + children: Vec, } impl ListBulletItem { pub fn new(label: impl Into) -> Self { Self { label: label.into(), + label_color: None, + children: Vec::new(), } } + + pub fn label_color(mut self, color: Color) -> Self { + self.label_color = Some(color); + self + } +} + +impl ParentElement for ListBulletItem { + fn extend(&mut self, elements: impl IntoIterator) { + self.children.extend(elements) + } } impl RenderOnce for ListBulletItem { @@ -33,8 +49,67 @@ impl RenderOnce for ListBulletItem { .color(Color::Hidden), ), ) - .child(div().w_full().min_w_0().child(Label::new(self.label))), + .map(|this| { + if !self.children.is_empty() { + this.child(h_flex().gap_0p5().flex_wrap().children(self.children)) + } else { + this.child( + div().w_full().min_w_0().child( + Label::new(self.label) + .color(self.label_color.unwrap_or(Color::Default)), + ), + ) + } + }), ) .into_any_element() } } + +impl Component for ListBulletItem { + fn scope() -> ComponentScope { + ComponentScope::DataDisplay + } + + fn description() -> Option<&'static str> { + Some("A list item with a dash indicator for unordered lists.") + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + let basic_examples = 
vec![ + single_example( + "Simple", + ListBulletItem::new("First bullet item").into_any_element(), + ), + single_example( + "Multiple Lines", + v_flex() + .child(ListBulletItem::new("First item")) + .child(ListBulletItem::new("Second item")) + .child(ListBulletItem::new("Third item")) + .into_any_element(), + ), + single_example( + "Long Text", + ListBulletItem::new( + "A longer bullet item that demonstrates text wrapping behavior", + ) + .into_any_element(), + ), + single_example( + "With Link", + ListBulletItem::new("") + .child(Label::new("Create a Zed account by")) + .child(ButtonLink::new("visiting the website", "https://zed.dev")) + .into_any_element(), + ), + ]; + + Some( + v_flex() + .gap_6() + .child(example_group(basic_examples).vertical()) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index d59af07fa5271c070fca8433156b94301cc134aa..8726dca50dada193b3051f14b6609a373fc60730 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -1,11 +1,12 @@ use std::sync::Arc; -use crate::{Disclosure, Label, h_flex, prelude::*}; +use crate::{Disclosure, prelude::*}; +use component::{Component, ComponentScope, example_group_with_title, single_example}; use gpui::{AnyElement, ClickEvent}; use settings::Settings; use theme::ThemeSettings; -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct ListHeader { /// The label of the header. label: SharedString, @@ -138,3 +139,80 @@ impl RenderOnce for ListHeader { ) } } + +impl Component for ListHeader { + fn scope() -> ComponentScope { + ComponentScope::DataDisplay + } + + fn description() -> Option<&'static str> { + Some( + "A header component for lists with support for icons, actions, and collapsible sections.", + ) + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + Some( + v_flex() + .gap_6() + .children(vec![ + example_group_with_title( + "Basic Headers", + vec![ + single_example( + "Simple", + ListHeader::new("Section Header").into_any_element(), + ), + single_example( + "With Icon", + ListHeader::new("Files") + .start_slot(Icon::new(IconName::File)) + .into_any_element(), + ), + single_example( + "With End Slot", + ListHeader::new("Recent") + .end_slot(Label::new("5").color(Color::Muted)) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "Collapsible Headers", + vec![ + single_example( + "Expanded", + ListHeader::new("Expanded Section") + .toggle(Some(true)) + .into_any_element(), + ), + single_example( + "Collapsed", + ListHeader::new("Collapsed Section") + .toggle(Some(false)) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "States", + vec![ + single_example( + "Selected", + ListHeader::new("Selected Header") + .toggle_state(true) + .into_any_element(), + ), + single_example( + "Inset", + ListHeader::new("Inset Header") + .inset(true) + .into_any_element(), + ), + ], + ), + ]) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index a58291438a1d10bb1b61149f412151375b6b0a1f..d581fad9453d9812f17b7bc9e0297fb9927c8188 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use component::{Component, ComponentScope, example_group_with_title, single_example}; use gpui::{AnyElement, AnyView, ClickEvent, MouseButton, MouseDownEvent, Pixels, px}; use 
smallvec::SmallVec; @@ -13,7 +14,7 @@ pub enum ListItemSpacing { Sparse, } -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct ListItem { id: ElementId, group_name: Option, @@ -355,3 +356,115 @@ impl RenderOnce for ListItem { ) } } + +impl Component for ListItem { + fn scope() -> ComponentScope { + ComponentScope::DataDisplay + } + + fn description() -> Option<&'static str> { + Some( + "A flexible list item component with support for icons, actions, disclosure toggles, and hierarchical display.", + ) + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + Some( + v_flex() + .gap_6() + .children(vec![ + example_group_with_title( + "Basic List Items", + vec![ + single_example( + "Simple", + ListItem::new("simple") + .child(Label::new("Simple list item")) + .into_any_element(), + ), + single_example( + "With Icon", + ListItem::new("with_icon") + .start_slot(Icon::new(IconName::File)) + .child(Label::new("List item with icon")) + .into_any_element(), + ), + single_example( + "Selected", + ListItem::new("selected") + .toggle_state(true) + .start_slot(Icon::new(IconName::Check)) + .child(Label::new("Selected item")) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "List Item Spacing", + vec![ + single_example( + "Dense", + ListItem::new("dense") + .spacing(ListItemSpacing::Dense) + .child(Label::new("Dense spacing")) + .into_any_element(), + ), + single_example( + "Extra Dense", + ListItem::new("extra_dense") + .spacing(ListItemSpacing::ExtraDense) + .child(Label::new("Extra dense spacing")) + .into_any_element(), + ), + single_example( + "Sparse", + ListItem::new("sparse") + .spacing(ListItemSpacing::Sparse) + .child(Label::new("Sparse spacing")) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "With Slots", + vec![ + single_example( + "End Slot", + ListItem::new("end_slot") + .child(Label::new("Item with end slot")) + .end_slot(Icon::new(IconName::ChevronRight)) + .into_any_element(), + ), + single_example( + "With Toggle", + ListItem::new("with_toggle") + .toggle(Some(true)) + .child(Label::new("Expandable item")) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "States", + vec![ + single_example( + "Disabled", + ListItem::new("disabled") + .disabled(true) + .child(Label::new("Disabled item")) + .into_any_element(), + ), + single_example( + "Non-selectable", + ListItem::new("non_selectable") + .selectable(false) + .child(Label::new("Non-selectable item")) + .into_any_element(), + ), + ], + ), + ]) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/list/list_sub_header.rs b/crates/ui/src/components/list/list_sub_header.rs index e6f5abfe0ab5ee4f7e10a85c3d2b15402df8fc53..b4a82fb2edf5fcfbe068dee2570884698b8b0663 100644 --- a/crates/ui/src/components/list/list_sub_header.rs +++ b/crates/ui/src/components/list/list_sub_header.rs @@ -1,7 +1,7 @@ use crate::prelude::*; -use crate::{Icon, IconName, IconSize, Label, h_flex}; +use component::{Component, ComponentScope, example_group_with_title, single_example}; -#[derive(IntoElement)] +#[derive(IntoElement, RegisterComponent)] pub struct ListSubHeader { label: SharedString, start_slot: Option, @@ -85,3 +85,65 @@ impl RenderOnce for ListSubHeader { ) } } + +impl Component for ListSubHeader { + fn scope() -> ComponentScope { + ComponentScope::DataDisplay + } + + fn description() -> Option<&'static str> { + Some( + "A sub-header component for organizing list content into subsections with optional icons and end slots.", + ) + } + + fn 
preview(_window: &mut Window, _cx: &mut App) -> Option { + Some( + v_flex() + .gap_6() + .children(vec![ + example_group_with_title( + "Basic Sub-headers", + vec![ + single_example( + "Simple", + ListSubHeader::new("Subsection").into_any_element(), + ), + single_example( + "With Icon", + ListSubHeader::new("Documents") + .left_icon(Some(IconName::File)) + .into_any_element(), + ), + single_example( + "With End Slot", + ListSubHeader::new("Recent") + .end_slot( + Label::new("3").color(Color::Muted).into_any_element(), + ) + .into_any_element(), + ), + ], + ), + example_group_with_title( + "States", + vec![ + single_example( + "Selected", + ListSubHeader::new("Selected") + .toggle_state(true) + .into_any_element(), + ), + single_example( + "Inset", + ListSubHeader::new("Inset Sub-header") + .inset(true) + .into_any_element(), + ), + ], + ), + ]) + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs index a70f5e1ea5a53a043086f3e102878f3614990d6e..85565f54885a06ddf2bc84f3639ca00fc4acc50e 100644 --- a/crates/ui/src/components/modal.rs +++ b/crates/ui/src/components/modal.rs @@ -77,6 +77,7 @@ impl RenderOnce for Modal { .w_full() .flex_1() .gap(DynamicSpacing::Base08.rems(cx)) + .when(self.footer.is_some(), |this| this.pb_4()) .when_some( self.container_scroll_handler, |this, container_scroll_handle| { @@ -276,7 +277,6 @@ impl RenderOnce for ModalFooter { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { h_flex() .w_full() - .mt_4() .p(DynamicSpacing::Base08.rems(cx)) .flex_none() .justify_between() diff --git a/crates/ui/src/components/navigable.rs b/crates/ui/src/components/navigable.rs index a592bcc36f4cc490c4676a83660ace050025ee39..07e761f9c0c14daf551d272c1a1894da84e1b3cf 100644 --- a/crates/ui/src/components/navigable.rs +++ b/crates/ui/src/components/navigable.rs @@ -75,7 +75,7 @@ impl RenderOnce for Navigable { }) .unwrap_or(0); if let Some(entry) = children.get(target) { - entry.focus_handle.focus(window); + entry.focus_handle.focus(window, cx); if let Some(anchor) = &entry.scroll_anchor { anchor.scroll_to(window, cx); } @@ -89,7 +89,7 @@ impl RenderOnce for Navigable { .and_then(|index| index.checked_sub(1)) .or(children.len().checked_sub(1)); if let Some(entry) = target.and_then(|target| children.get(target)) { - entry.focus_handle.focus(window); + entry.focus_handle.focus(window, cx); if let Some(anchor) = &entry.scroll_anchor { anchor.scroll_to(window, cx); } diff --git a/crates/ui/src/components/notification/alert_modal.rs b/crates/ui/src/components/notification/alert_modal.rs index 9990dc1ce5f13e6834a009c4b8d7c14b594ccf36..52a084c847887a4dea7fd8b9a3fbad8390f68863 100644 --- a/crates/ui/src/components/notification/alert_modal.rs +++ b/crates/ui/src/components/notification/alert_modal.rs @@ -1,73 +1,161 @@ use crate::component_prelude::*; use crate::prelude::*; +use crate::{Checkbox, ListBulletItem, ToggleState}; +use gpui::Action; +use gpui::FocusHandle; use gpui::IntoElement; +use gpui::Stateful; use smallvec::{SmallVec, smallvec}; +use theme::ActiveTheme; + +type ActionHandler = Box) -> Stateful
>; #[derive(IntoElement, RegisterComponent)] pub struct AlertModal { id: ElementId, + header: Option, children: SmallVec<[AnyElement; 2]>, - title: SharedString, - primary_action: SharedString, - dismiss_label: SharedString, + footer: Option, + title: Option, + primary_action: Option, + dismiss_label: Option, + width: Option, + key_context: Option, + action_handlers: Vec, + focus_handle: Option, } impl AlertModal { - pub fn new(id: impl Into, title: impl Into) -> Self { + pub fn new(id: impl Into) -> Self { Self { id: id.into(), + header: None, children: smallvec![], - title: title.into(), - primary_action: "Ok".into(), - dismiss_label: "Cancel".into(), + footer: None, + title: None, + primary_action: None, + dismiss_label: None, + width: None, + key_context: None, + action_handlers: Vec::new(), + focus_handle: None, } } + pub fn title(mut self, title: impl Into) -> Self { + self.title = Some(title.into()); + self + } + + pub fn header(mut self, header: impl IntoElement) -> Self { + self.header = Some(header.into_any_element()); + self + } + + pub fn footer(mut self, footer: impl IntoElement) -> Self { + self.footer = Some(footer.into_any_element()); + self + } + pub fn primary_action(mut self, primary_action: impl Into) -> Self { - self.primary_action = primary_action.into(); + self.primary_action = Some(primary_action.into()); self } pub fn dismiss_label(mut self, dismiss_label: impl Into) -> Self { - self.dismiss_label = dismiss_label.into(); + self.dismiss_label = Some(dismiss_label.into()); + self + } + + pub fn width(mut self, width: impl Into) -> Self { + self.width = Some(width.into()); + self + } + + pub fn key_context(mut self, key_context: impl Into) -> Self { + self.key_context = Some(key_context.into()); + self + } + + pub fn on_action( + mut self, + listener: impl Fn(&A, &mut Window, &mut App) + 'static, + ) -> Self { + self.action_handlers + .push(Box::new(move |div| div.on_action(listener))); + self + } + + pub fn track_focus(mut self, focus_handle: &gpui::FocusHandle) -> Self { + self.focus_handle = Some(focus_handle.clone()); self } } impl RenderOnce for AlertModal { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - v_flex() + let width = self.width.unwrap_or_else(|| px(440.).into()); + let has_default_footer = self.primary_action.is_some() || self.dismiss_label.is_some(); + + let mut modal = v_flex() + .when_some(self.key_context, |this, key_context| { + this.key_context(key_context.as_str()) + }) + .when_some(self.focus_handle, |this, focus_handle| { + this.track_focus(&focus_handle) + }) .id(self.id) .elevation_3(cx) - .w(px(440.)) - .p_5() - .child( + .w(width) + .bg(cx.theme().colors().elevated_surface_background) + .overflow_hidden(); + + for handler in self.action_handlers { + modal = handler(modal); + } + + if let Some(header) = self.header { + modal = modal.child(header); + } else if let Some(title) = self.title { + modal = modal.child( + v_flex() + .pt_3() + .pr_3() + .pl_3() + .pb_1() + .child(Headline::new(title).size(HeadlineSize::Small)), + ); + } + + if !self.children.is_empty() { + modal = modal.child( v_flex() + .p_3() .text_ui(cx) .text_color(Color::Muted.color(cx)) .gap_1() - .child(Headline::new(self.title).size(HeadlineSize::Small)) .children(self.children), - ) - .child( + ); + } + + if let Some(footer) = self.footer { + modal = modal.child(footer); + } else if has_default_footer { + let primary_action = self.primary_action.unwrap_or_else(|| "Ok".into()); + let dismiss_label = self.dismiss_label.unwrap_or_else(|| 
"Cancel".into()); + + modal = modal.child( h_flex() - .h(rems(1.75)) + .p_3() .items_center() - .child(div().flex_1()) - .child( - h_flex() - .items_center() - .gap_1() - .child( - Button::new(self.dismiss_label.clone(), self.dismiss_label.clone()) - .color(Color::Muted), - ) - .child(Button::new( - self.primary_action.clone(), - self.primary_action, - )), - ), - ) + .justify_end() + .gap_1() + .child(Button::new(dismiss_label.clone(), dismiss_label).color(Color::Muted)) + .child(Button::new(primary_action.clone(), primary_action)), + ); + } + + modal } } @@ -90,24 +178,75 @@ impl Component for AlertModal { Some("A modal dialog that presents an alert message with primary and dismiss actions.") } - fn preview(_window: &mut Window, _cx: &mut App) -> Option { + fn preview(_window: &mut Window, cx: &mut App) -> Option { Some( v_flex() .gap_6() .p_4() - .children(vec![example_group( - vec![ - single_example( - "Basic Alert", - AlertModal::new("simple-modal", "Do you want to leave the current call?") - .child("The current window will be closed, and connections to any shared projects will be terminated." - ) - .primary_action("Leave Call") - .into_any_element(), - ) - ], - )]) - .into_any_element() + .children(vec![ + example_group(vec![single_example( + "Basic Alert", + AlertModal::new("simple-modal") + .title("Do you want to leave the current call?") + .child( + "The current window will be closed, and connections to any shared projects will be terminated." + ) + .primary_action("Leave Call") + .dismiss_label("Cancel") + .into_any_element(), + )]), + example_group(vec![single_example( + "Custom Header", + AlertModal::new("custom-header-modal") + .header( + v_flex() + .p_3() + .bg(cx.theme().colors().background) + .gap_1() + .child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child(Headline::new("Unrecognized Workspace").size(HeadlineSize::Small)) + ) + .child( + h_flex() + .pl(IconSize::default().rems() + rems(0.5)) + .child(Label::new("~/projects/my-project").color(Color::Muted)) + ) + ) + .child( + "Untrusted workspaces are opened in Restricted Mode to protect your system. 
+Review .zed/settings.json for any extensions or commands configured by this project.", + ) + .child( + v_flex() + .mt_1() + .child(Label::new("Restricted mode prevents:").color(Color::Muted)) + .child(ListBulletItem::new("Project settings from being applied")) + .child(ListBulletItem::new("Language servers from running")) + .child(ListBulletItem::new("MCP integrations from installing")) + ) + .footer( + h_flex() + .p_3() + .justify_between() + .child( + Checkbox::new("trust-parent", ToggleState::Unselected) + .label("Trust all projects in parent directory") + ) + .child( + h_flex() + .gap_1() + .child(Button::new("restricted", "Stay in Restricted Mode").color(Color::Muted)) + .child(Button::new("trust", "Trust and Continue").style(ButtonStyle::Filled)) + ) + ) + .width(rems(40.)) + .into_any_element(), + )]), + ]) + .into_any_element(), ) } } diff --git a/crates/ui/src/components/popover_menu.rs b/crates/ui/src/components/popover_menu.rs index b1a52bec8fdf1f7030b5b321bed7702d602ff212..cd79e50ce01b1f4e697b252801c2ae76765726d2 100644 --- a/crates/ui/src/components/popover_menu.rs +++ b/crates/ui/src/components/popover_menu.rs @@ -281,13 +281,25 @@ fn show_menu( if modal.focus_handle(cx).contains_focused(window, cx) && let Some(previous_focus_handle) = previous_focus_handle.as_ref() { - window.focus(previous_focus_handle); + window.focus(previous_focus_handle, cx); } *menu2.borrow_mut() = None; window.refresh(); }) .detach(); - window.focus(&new_menu.focus_handle(cx)); + + // Since menus are rendered in a deferred fashion, their focus handles are + // not linked in the dispatch tree until after the deferred draw callback + // runs. We need to wait for that to happen before focusing it, so that + // calling `contains_focused` on the parent's focus handle returns `true` + // when the menu is focused. This prevents the pane's tab bar buttons from + // flickering when opening popover menus. + let focus_handle = new_menu.focus_handle(cx); + window.on_next_frame(move |window, _cx| { + window.on_next_frame(move |window, cx| { + window.focus(&focus_handle, cx); + }); + }); *menu.borrow_mut() = Some(new_menu); window.refresh(); diff --git a/crates/ui/src/components/right_click_menu.rs b/crates/ui/src/components/right_click_menu.rs index 761189671b935bf1f3d9e3f7d4d547528cf20196..faf2cb3429b610727209e13188656c174aefb655 100644 --- a/crates/ui/src/components/right_click_menu.rs +++ b/crates/ui/src/components/right_click_menu.rs @@ -223,7 +223,6 @@ impl Element for RightClickMenu { if let Some(mut menu) = request_layout.menu_element.take() { menu.paint(window, cx); - return; } let Some(builder) = this.menu_builder.take() else { @@ -254,13 +253,25 @@ impl Element for RightClickMenu { && let Some(previous_focus_handle) = previous_focus_handle.as_ref() { - window.focus(previous_focus_handle); + window.focus(previous_focus_handle, cx); } *menu2.borrow_mut() = None; window.refresh(); }) .detach(); - window.focus(&new_menu.focus_handle(cx)); + + // Since menus are rendered in a deferred fashion, their focus handles are + // not linked in the dispatch tree until after the deferred draw callback + // runs. We need to wait for that to happen before focusing it, so that + // calling `contains_focused` on the parent's focus handle returns `true` + // when the menu is focused. This prevents the pane's tab bar buttons from + // flickering when opening menus. 
+ let focus_handle = new_menu.focus_handle(cx); + window.on_next_frame(move |window, _cx| { + window.on_next_frame(move |window, cx| { + window.focus(&focus_handle, cx); + }); + }); *menu.borrow_mut() = Some(new_menu); *position.borrow_mut() = if let Some(child_bounds) = child_bounds { if let Some(attach) = attach { diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index d3d33a296bbd65edb24371d8f5f1e6462e77e3fe..391d480fb313d078bb20ab790ecbb61d7425257a 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -150,9 +150,9 @@ pub trait WithScrollbar: Sized { // } #[track_caller] - fn vertical_scrollbar_for( + fn vertical_scrollbar_for( self, - scroll_handle: ScrollHandle, + scroll_handle: &ScrollHandle, window: &mut Window, cx: &mut App, ) -> Self::Output { @@ -441,7 +441,7 @@ impl Scrollbars { pub fn tracked_scroll_handle( self, - tracked_scroll_handle: TrackedHandle, + tracked_scroll_handle: &TrackedHandle, ) -> Scrollbars { let Self { id, @@ -454,7 +454,7 @@ impl Scrollbars { } = self; Scrollbars { - scrollable_handle: Handle::Tracked(tracked_scroll_handle), + scrollable_handle: Handle::Tracked(tracked_scroll_handle.clone()), id, tracked_entity: tracked_entity_id, visibility, @@ -968,7 +968,7 @@ impl ScrollableHandle for ScrollHandle { } } -pub trait ScrollableHandle: 'static + Any + Sized { +pub trait ScrollableHandle: 'static + Any + Sized + Clone { fn max_offset(&self) -> Size; fn set_offset(&self, point: Point); fn offset(&self) -> Point; diff --git a/crates/ui/src/components/stories.rs b/crates/ui/src/components/stories.rs index 05e8cd18d51d16ee4bc355bfa455891183d1749b..bcfcfd04c3176d6a19385101360ab21bffb9cc8e 100644 --- a/crates/ui/src/components/stories.rs +++ b/crates/ui/src/components/stories.rs @@ -1,19 +1,3 @@ mod context_menu; -mod icon_button; -mod keybinding; -mod list; -mod list_header; -mod list_item; -mod tab; -mod tab_bar; -mod toggle_button; pub use context_menu::*; -pub use icon_button::*; -pub use keybinding::*; -pub use list::*; -pub use list_header::*; -pub use list_item::*; -pub use tab::*; -pub use tab_bar::*; -pub use toggle_button::*; diff --git a/crates/ui/src/components/stories/avatar.rs b/crates/ui/src/components/stories/avatar.rs deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/crates/ui/src/components/stories/button.rs b/crates/ui/src/components/stories/button.rs deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/crates/ui/src/components/stories/disclosure.rs b/crates/ui/src/components/stories/disclosure.rs deleted file mode 100644 index 5a395388f450a19270426a6df7efa78d490792c2..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/disclosure.rs +++ /dev/null @@ -1,18 +0,0 @@ -use gpui::Render; -use story::Story; - -use crate::Disclosure; -use crate::prelude::*; - -pub struct DisclosureStory; - -impl Render for DisclosureStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Toggled")) - .child(Disclosure::new("toggled", true)) - .child(Story::label("Not Toggled")) - .child(Disclosure::new("not_toggled", false)) - } -} diff --git a/crates/ui/src/components/stories/icon_button.rs b/crates/ui/src/components/stories/icon_button.rs deleted file mode 100644 index 
166297eabc389ca5cc4dea5070c21cb9efa00133..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/icon_button.rs +++ /dev/null @@ -1,148 +0,0 @@ -use gpui::Render; -use story::{Story, StoryItem, StorySection}; - -use crate::{IconButton, IconName}; -use crate::{IconButtonShape, Tooltip, prelude::*}; - -pub struct IconButtonStory; - -impl Render for IconButtonStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let default_button = StoryItem::new( - "Default", - IconButton::new("default_icon_button", IconName::Hash), - ) - .description("Displays an icon button.") - .usage( - r#" - IconButton::new("default_icon_button", Icon::Hash) - "#, - ); - - let selected_button = StoryItem::new( - "Selected", - IconButton::new("selected_icon_button", IconName::Hash).toggle_state(true), - ) - .description("Displays an icon button that is selected.") - .usage( - r#" - IconButton::new("selected_icon_button", Icon::Hash).selected(true) - "#, - ); - - let selected_with_selected_icon = StoryItem::new( - "Selected with `selected_icon`", - IconButton::new("selected_with_selected_icon_button", IconName::AudioOn) - .toggle_state(true) - .selected_icon(IconName::AudioOff), - ) - .description( - "Displays an icon button that is selected and shows a different icon when selected.", - ) - .usage( - r#" - IconButton::new("selected_with_selected_icon_button", Icon::AudioOn) - .selected(true) - .selected_icon(Icon::AudioOff) - "#, - ); - - let disabled_button = StoryItem::new( - "Disabled", - IconButton::new("disabled_icon_button", IconName::Hash).disabled(true), - ) - .description("Displays an icon button that is disabled.") - .usage( - r#" - IconButton::new("disabled_icon_button", Icon::Hash).disabled(true) - "#, - ); - - let with_on_click_button = StoryItem::new( - "With `on_click`", - IconButton::new("with_on_click_button", IconName::Ai).on_click( - |_event, _window, _cx| { - println!("Clicked!"); - }, - ), - ) - .description("Displays an icon button which triggers an event on click.") - .usage( - r#" - IconButton::new("with_on_click_button", Icon::Ai).on_click(|_event, _cx| { - println!("Clicked!"); - }) - "#, - ); - - let with_tooltip_button = StoryItem::new( - "With `tooltip`", - IconButton::new("with_tooltip_button", IconName::Chat) - .tooltip(Tooltip::text("Open messages")), - ) - .description("Displays an icon button that has a tooltip when hovered.") - .usage( - r#" - IconButton::new("with_tooltip_button", Icon::MessageBubbles) - .tooltip(Tooltip::text_f("Open messages")) - "#, - ); - - let selected_with_tooltip_button = StoryItem::new( - "Selected with `tooltip`", - IconButton::new("selected_with_tooltip_button", IconName::CaseSensitive) - .toggle_state(true) - .tooltip(Tooltip::text("Toggle inlay hints")), - ) - .description("Displays a selected icon button with tooltip.") - .usage( - r#" - IconButton::new("selected_with_tooltip_button", Icon::InlayHint) - .selected(true) - .tooltip(Tooltip::text_f("Toggle inlay hints")) - "#, - ); - - let buttons = vec![ - default_button, - selected_button, - selected_with_selected_icon, - disabled_button, - with_on_click_button, - with_tooltip_button, - selected_with_tooltip_button, - ]; - - Story::container(cx) - .child(Story::title_for::(cx)) - .child(StorySection::new().children(buttons)) - .child( - StorySection::new().child(StoryItem::new( - "Square", - h_flex() - .gap_2() - .child( - IconButton::new("square-medium", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Medium), - ) - 
.child( - IconButton::new("square-small", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small), - ) - .child( - IconButton::new("square-xsmall", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::XSmall), - ) - .child( - IconButton::new("square-indicator", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Indicator), - ), - )), - ) - .into_element() - } -} diff --git a/crates/ui/src/components/stories/keybinding.rs b/crates/ui/src/components/stories/keybinding.rs deleted file mode 100644 index 5840a11cf702f7a47aed06791ab47f12e2418d9c..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/keybinding.rs +++ /dev/null @@ -1,136 +0,0 @@ -use gpui::NoAction; -use gpui::Render; -use itertools::Itertools; -use settings::KeybindSource; -use story::Story; - -use crate::{KeyBinding, prelude::*}; - -pub struct KeybindingStory; - -pub fn binding(key: &str) -> gpui::KeyBinding { - gpui::KeyBinding::new(key, NoAction {}, None) -} - -impl Render for KeybindingStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let all_modifier_permutations = ["ctrl", "alt", "cmd", "shift"].into_iter().permutations(2); - - const SOURCE: KeybindSource = KeybindSource::Base; - - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Single Key", cx)) - .child(KeyBinding::from_keystrokes( - binding("Z").keystrokes().into(), - SOURCE, - )) - .child(Story::label("Single Key with Modifier", cx)) - .child( - div() - .flex() - .gap_3() - .child(KeyBinding::from_keystrokes( - binding("ctrl-c").keystrokes().into(), - SOURCE, - )) - .child(KeyBinding::from_keystrokes( - binding("alt-c").keystrokes().into(), - SOURCE, - )) - .child(KeyBinding::from_keystrokes( - binding("cmd-c").keystrokes().into(), - SOURCE, - )) - .child(KeyBinding::from_keystrokes( - binding("shift-c").keystrokes().into(), - SOURCE, - )), - ) - .child(Story::label("Single Key with Modifier (Permuted)", cx)) - .child( - div().flex().flex_col().children( - all_modifier_permutations - .chunks(4) - .into_iter() - .map(|chunk| { - div() - .flex() - .gap_4() - .py_3() - .children(chunk.map(|permutation| { - KeyBinding::from_keystrokes( - binding(&(permutation.join("-") + "-x")) - .keystrokes() - .into(), - SOURCE, - ) - })) - }), - ), - ) - .child(Story::label("Single Key with All Modifiers", cx)) - .child(KeyBinding::from_keystrokes( - binding("ctrl-alt-cmd-shift-z").keystrokes().into(), - SOURCE, - )) - .child(Story::label("Chord", cx)) - .child(KeyBinding::from_keystrokes( - binding("a z").keystrokes().into(), - SOURCE, - )) - .child(Story::label("Chord with Modifier", cx)) - .child(KeyBinding::from_keystrokes( - binding("ctrl-a shift-z").keystrokes().into(), - SOURCE, - )) - .child(KeyBinding::from_keystrokes( - binding("fn-s").keystrokes().into(), - SOURCE, - )) - .child(Story::label("Single Key with All Modifiers (Linux)", cx)) - .child( - KeyBinding::from_keystrokes( - binding("ctrl-alt-cmd-shift-z").keystrokes().into(), - SOURCE, - ) - .platform_style(PlatformStyle::Linux), - ) - .child(Story::label("Chord (Linux)", cx)) - .child( - KeyBinding::from_keystrokes(binding("a z").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Linux), - ) - .child(Story::label("Chord with Modifier (Linux)", cx)) - .child( - KeyBinding::from_keystrokes(binding("ctrl-a shift-z").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Linux), - ) - .child( - 
KeyBinding::from_keystrokes(binding("fn-s").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Linux), - ) - .child(Story::label("Single Key with All Modifiers (Windows)", cx)) - .child( - KeyBinding::from_keystrokes( - binding("ctrl-alt-cmd-shift-z").keystrokes().into(), - SOURCE, - ) - .platform_style(PlatformStyle::Windows), - ) - .child(Story::label("Chord (Windows)", cx)) - .child( - KeyBinding::from_keystrokes(binding("a z").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Windows), - ) - .child(Story::label("Chord with Modifier (Windows)", cx)) - .child( - KeyBinding::from_keystrokes(binding("ctrl-a shift-z").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Windows), - ) - .child( - KeyBinding::from_keystrokes(binding("fn-s").keystrokes().into(), SOURCE) - .platform_style(PlatformStyle::Windows), - ) - } -} diff --git a/crates/ui/src/components/stories/list.rs b/crates/ui/src/components/stories/list.rs deleted file mode 100644 index 6a0e672d31771fd2c946e2c207ae052baf77fb01..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/list.rs +++ /dev/null @@ -1,36 +0,0 @@ -use gpui::Render; -use story::Story; - -use crate::{List, ListItem}; -use crate::{ListHeader, ListSeparator, ListSubHeader, prelude::*}; - -pub struct ListStory; - -impl Render for ListStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Default", cx)) - .child( - List::new() - .child(ListItem::new("apple").child("Apple")) - .child(ListItem::new("banana").child("Banana")) - .child(ListItem::new("cherry").child("Cherry")), - ) - .child(Story::label("With sections", cx)) - .child( - List::new() - .header(ListHeader::new("Produce")) - .child(ListSubHeader::new("Fruits")) - .child(ListItem::new("apple").child("Apple")) - .child(ListItem::new("banana").child("Banana")) - .child(ListItem::new("cherry").child("Cherry")) - .child(ListSeparator) - .child(ListSubHeader::new("Root Vegetables")) - .child(ListItem::new("carrot").child("Carrot")) - .child(ListItem::new("potato").child("Potato")) - .child(ListSubHeader::new("Leafy Vegetables")) - .child(ListItem::new("kale").child("Kale")), - ) - } -} diff --git a/crates/ui/src/components/stories/list_header.rs b/crates/ui/src/components/stories/list_header.rs deleted file mode 100644 index f7fa068d5a11cb0bd772dc4c10fd19c048ae0181..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/list_header.rs +++ /dev/null @@ -1,31 +0,0 @@ -use gpui::Render; -use story::Story; - -use crate::{IconButton, prelude::*}; -use crate::{IconName, ListHeader}; - -pub struct ListHeaderStory; - -impl Render for ListHeaderStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Default", cx)) - .child(ListHeader::new("Section 1")) - .child(Story::label("With left icon", cx)) - .child(ListHeader::new("Section 2").start_slot(Icon::new(IconName::Bell))) - .child(Story::label("With left icon and meta", cx)) - .child( - ListHeader::new("Section 3") - .start_slot(Icon::new(IconName::BellOff)) - .end_slot(IconButton::new("action_1", IconName::BoltFilled)), - ) - .child(Story::label("With multiple meta", cx)) - .child( - ListHeader::new("Section 4") - .end_slot(IconButton::new("action_1", IconName::BoltFilled)) - .end_slot(IconButton::new("action_2", IconName::Warning)) - 
.end_slot(IconButton::new("action_3", IconName::Plus)), - ) - } -} diff --git a/crates/ui/src/components/stories/list_item.rs b/crates/ui/src/components/stories/list_item.rs deleted file mode 100644 index ee8f5e6c7280215c81f4bc9e71685a1ffec11c80..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/list_item.rs +++ /dev/null @@ -1,131 +0,0 @@ -use gpui::Render; -use story::Story; - -use crate::{Avatar, prelude::*}; -use crate::{IconName, ListItem}; - -const OVERFLOWING_TEXT: &str = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean mauris ligula, luctus vel dignissim eu, vestibulum sed libero. Sed at convallis velit."; - -pub struct ListItemStory; - -impl Render for ListItemStory { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .bg(cx.theme().colors().background) - .child(Story::title_for::(cx)) - .child(Story::label("Default", cx)) - .child(ListItem::new("hello_world").child("Hello, world!")) - .child(Story::label("Inset", cx)) - .child( - ListItem::new("inset_list_item") - .inset(true) - .start_slot( - Icon::new(IconName::Bell) - .size(IconSize::Small) - .color(Color::Muted), - ) - .child("Hello, world!") - .end_slot( - Icon::new(IconName::Bell) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - .child(Story::label("With start slot icon", cx)) - .child( - ListItem::new("with start slot_icon") - .child("Hello, world!") - .start_slot( - Icon::new(IconName::Bell) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - .child(Story::label("With start slot avatar", cx)) - .child( - ListItem::new("with_start slot avatar") - .child("Hello, world!") - .start_slot(Avatar::new( - "https://avatars.githubusercontent.com/u/1714999?v=4", - )), - ) - .child(Story::label("With end slot", cx)) - .child( - ListItem::new("with_left_avatar") - .child("Hello, world!") - .end_slot(Avatar::new( - "https://avatars.githubusercontent.com/u/1714999?v=4", - )), - ) - .child(Story::label("With end hover slot", cx)) - .child( - ListItem::new("with_end_hover_slot") - .child("Hello, world!") - .end_slot( - h_flex() - .gap_2() - .child(Avatar::new( - "https://avatars.githubusercontent.com/u/1789?v=4", - )) - .child(Avatar::new( - "https://avatars.githubusercontent.com/u/1789?v=4", - )) - .child(Avatar::new( - "https://avatars.githubusercontent.com/u/1789?v=4", - )) - .child(Avatar::new( - "https://avatars.githubusercontent.com/u/1789?v=4", - )) - .child(Avatar::new( - "https://avatars.githubusercontent.com/u/1789?v=4", - )), - ) - .end_hover_slot(Avatar::new( - "https://avatars.githubusercontent.com/u/1714999?v=4", - )), - ) - .child(Story::label("With `on_click`", cx)) - .child(ListItem::new("with_on_click").child("Click me").on_click( - |_event, _window, _cx| { - println!("Clicked!"); - }, - )) - .child(Story::label("With `on_secondary_mouse_down`", cx)) - .child( - ListItem::new("with_on_secondary_mouse_down") - .child("Right click me") - .on_secondary_mouse_down(|_event, _window, _cx| { - println!("Right mouse down!"); - }), - ) - .child(Story::label( - "With overflowing content in the `end_slot`", - cx, - )) - .child( - ListItem::new("with_overflowing_content_in_end_slot") - .child("An excerpt") - .end_slot(Label::new(OVERFLOWING_TEXT).color(Color::Muted)), - ) - .child(Story::label( - "`inset` with overflowing content in the `end_slot`", - cx, - )) - .child( - ListItem::new("inset_with_overflowing_content_in_end_slot") - .inset(true) - .child("An excerpt") - 
.end_slot(Label::new(OVERFLOWING_TEXT).color(Color::Muted)), - ) - .child(Story::label( - "`inset` with overflowing content in `children` and `end_slot`", - cx, - )) - .child( - ListItem::new("inset_with_overflowing_content_in_children_and_end_slot") - .inset(true) - .child(Label::new(OVERFLOWING_TEXT)) - .end_slot(Label::new(OVERFLOWING_TEXT).color(Color::Muted)), - ) - } -} diff --git a/crates/ui/src/components/stories/tab.rs b/crates/ui/src/components/stories/tab.rs deleted file mode 100644 index e6c80c54e9752ff7ebee68a70f3af6d7023d0c74..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/tab.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::cmp::Ordering; - -use gpui::Render; -use story::Story; - -use crate::{IconButtonShape, TabPosition, prelude::*}; -use crate::{Indicator, Tab}; - -pub struct TabStory; - -impl Render for TabStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Default", cx)) - .child(h_flex().child(Tab::new("tab_1").child("Tab 1"))) - .child(Story::label("With indicator", cx)) - .child( - h_flex().child( - Tab::new("tab_1") - .start_slot(Indicator::dot().color(Color::Warning)) - .child("Tab 1"), - ), - ) - .child(Story::label("With close button", cx)) - .child( - h_flex().child( - Tab::new("tab_1") - .end_slot( - IconButton::new("close_button", IconName::Close) - .visible_on_hover("") - .shape(IconButtonShape::Square) - .icon_color(Color::Muted) - .size(ButtonSize::None) - .icon_size(IconSize::XSmall), - ) - .child("Tab 1"), - ), - ) - .child(Story::label("List of tabs", cx)) - .child( - h_flex() - .child(Tab::new("tab_1").child("Tab 1")) - .child(Tab::new("tab_2").child("Tab 2")), - ) - .child(Story::label("List of tabs with first tab selected", cx)) - .child( - h_flex() - .child( - Tab::new("tab_1") - .toggle_state(true) - .position(TabPosition::First) - .child("Tab 1"), - ) - .child( - Tab::new("tab_2") - .position(TabPosition::Middle(Ordering::Greater)) - .child("Tab 2"), - ) - .child( - Tab::new("tab_3") - .position(TabPosition::Middle(Ordering::Greater)) - .child("Tab 3"), - ) - .child(Tab::new("tab_4").position(TabPosition::Last).child("Tab 4")), - ) - .child(Story::label("List of tabs with last tab selected", cx)) - .child( - h_flex() - .child( - Tab::new("tab_1") - .position(TabPosition::First) - .child("Tab 1"), - ) - .child( - Tab::new("tab_2") - .position(TabPosition::Middle(Ordering::Less)) - .child("Tab 2"), - ) - .child( - Tab::new("tab_3") - .position(TabPosition::Middle(Ordering::Less)) - .child("Tab 3"), - ) - .child( - Tab::new("tab_4") - .position(TabPosition::Last) - .toggle_state(true) - .child("Tab 4"), - ), - ) - .child(Story::label("List of tabs with second tab selected", cx)) - .child( - h_flex() - .child( - Tab::new("tab_1") - .position(TabPosition::First) - .child("Tab 1"), - ) - .child( - Tab::new("tab_2") - .position(TabPosition::Middle(Ordering::Equal)) - .toggle_state(true) - .child("Tab 2"), - ) - .child( - Tab::new("tab_3") - .position(TabPosition::Middle(Ordering::Greater)) - .child("Tab 3"), - ) - .child(Tab::new("tab_4").position(TabPosition::Last).child("Tab 4")), - ) - } -} diff --git a/crates/ui/src/components/stories/tab_bar.rs b/crates/ui/src/components/stories/tab_bar.rs deleted file mode 100644 index fbb6c8c248af49a40c0246b9b249961b0198d880..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/tab_bar.rs +++ /dev/null @@ -1,59 +0,0 @@ -use gpui::Render; -use 
story::Story; - -use crate::{Tab, TabBar, TabPosition, prelude::*}; - -pub struct TabBarStory; - -impl Render for TabBarStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let tab_count = 20; - let selected_tab_index = 3; - - let tabs = (0..tab_count) - .map(|index| { - Tab::new(index) - .toggle_state(index == selected_tab_index) - .position(if index == 0 { - TabPosition::First - } else if index == tab_count - 1 { - TabPosition::Last - } else { - TabPosition::Middle(index.cmp(&selected_tab_index)) - }) - .child(Label::new(format!("Tab {}", index + 1)).color( - if index == selected_tab_index { - Color::Default - } else { - Color::Muted - }, - )) - }) - .collect::>(); - - Story::container(cx) - .child(Story::title_for::(cx)) - .child(Story::label("Default", cx)) - .child( - h_flex().child( - TabBar::new("tab_bar_1") - .start_child( - IconButton::new("navigate_backward", IconName::ArrowLeft) - .icon_size(IconSize::Small), - ) - .start_child( - IconButton::new("navigate_forward", IconName::ArrowRight) - .icon_size(IconSize::Small), - ) - .end_child( - IconButton::new("new", IconName::Plus).icon_size(IconSize::Small), - ) - .end_child( - IconButton::new("split_pane", IconName::Split) - .icon_size(IconSize::Small), - ) - .children(tabs), - ), - ) - } -} diff --git a/crates/ui/src/components/stories/toggle_button.rs b/crates/ui/src/components/stories/toggle_button.rs deleted file mode 100644 index 903c7059a872448d7d227340a066ef044a8db100..0000000000000000000000000000000000000000 --- a/crates/ui/src/components/stories/toggle_button.rs +++ /dev/null @@ -1,93 +0,0 @@ -use gpui::Render; -use story::{Story, StoryItem, StorySection}; - -use crate::{ToggleButton, prelude::*}; - -pub struct ToggleButtonStory; - -impl Render for ToggleButtonStory { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - Story::container(cx) - .child(Story::title_for::(cx)) - .child( - StorySection::new().child( - StoryItem::new( - "Default", - ToggleButton::new("default_toggle_button", "Hello"), - ) - .description("Displays a toggle button.") - .usage(""), - ), - ) - .child( - StorySection::new().child( - StoryItem::new( - "Toggle button group", - h_flex() - .child( - ToggleButton::new(1, "Apple") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .first(), - ) - .child( - ToggleButton::new(2, "Banana") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .middle(), - ) - .child( - ToggleButton::new(3, "Cherry") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .middle(), - ) - .child( - ToggleButton::new(4, "Dragonfruit") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .last(), - ), - ) - .description("Displays a group of toggle buttons.") - .usage(""), - ), - ) - .child( - StorySection::new().child( - StoryItem::new( - "Toggle button group with selection", - h_flex() - .child( - ToggleButton::new(1, "Apple") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .first(), - ) - .child( - ToggleButton::new(2, "Banana") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .toggle_state(true) - .middle(), - ) - .child( - ToggleButton::new(3, "Cherry") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .middle(), - ) - .child( - ToggleButton::new(4, "Dragonfruit") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .last(), - ), - ) - .description("Displays a group of toggle buttons.") - .usage(""), - ), - ) - .into_element() - } -} diff --git a/crates/ui/src/components/tab_bar.rs 
b/crates/ui/src/components/tab_bar.rs index 3c467c06ce2654c5886c30e42dfb7276fdb7d289..86598b8c6f1ab3a479313c7775405863e9e3b49b 100644 --- a/crates/ui/src/components/tab_bar.rs +++ b/crates/ui/src/components/tab_bar.rs @@ -10,6 +10,7 @@ pub struct TabBar { start_children: SmallVec<[AnyElement; 2]>, children: SmallVec<[AnyElement; 2]>, end_children: SmallVec<[AnyElement; 2]>, + pre_end_children: SmallVec<[AnyElement; 2]>, scroll_handle: Option, } @@ -20,12 +21,13 @@ impl TabBar { start_children: SmallVec::new(), children: SmallVec::new(), end_children: SmallVec::new(), + pre_end_children: SmallVec::new(), scroll_handle: None, } } - pub fn track_scroll(mut self, scroll_handle: ScrollHandle) -> Self { - self.scroll_handle = Some(scroll_handle); + pub fn track_scroll(mut self, scroll_handle: &ScrollHandle) -> Self { + self.scroll_handle = Some(scroll_handle.clone()); self } @@ -70,6 +72,15 @@ impl TabBar { self } + pub fn pre_end_child(mut self, end_child: impl IntoElement) -> Self + where + Self: Sized, + { + self.pre_end_children + .push(end_child.into_element().into_any()); + self + } + pub fn end_children(mut self, end_children: impl IntoIterator) -> Self where Self: Sized, @@ -137,18 +148,32 @@ impl RenderOnce for TabBar { .children(self.children), ), ) - .when(!self.end_children.is_empty(), |this| { - this.child( - h_flex() - .flex_none() - .gap(DynamicSpacing::Base04.rems(cx)) - .px(DynamicSpacing::Base06.rems(cx)) - .border_b_1() - .border_l_1() - .border_color(cx.theme().colors().border) - .children(self.end_children), - ) - }) + .when( + !self.end_children.is_empty() || !self.pre_end_children.is_empty(), + |this| { + this.child( + h_flex() + .flex_none() + .gap(DynamicSpacing::Base04.rems(cx)) + .px(DynamicSpacing::Base06.rems(cx)) + .children(self.pre_end_children) + .border_color(cx.theme().colors().border) + .border_b_1() + .when(!self.end_children.is_empty(), |div| { + div.child( + h_flex() + .h_full() + .flex_none() + .pl(DynamicSpacing::Base04.rems(cx)) + .gap(DynamicSpacing::Base04.rems(cx)) + .border_l_1() + .border_color(cx.theme().colors().border) + .children(self.end_children), + ) + }), + ) + }, + ) } } diff --git a/crates/ui/src/components/thread_item.rs b/crates/ui/src/components/thread_item.rs new file mode 100644 index 0000000000000000000000000000000000000000..a4f6a8a53348d78563900c2a53b30e95588c2aac --- /dev/null +++ b/crates/ui/src/components/thread_item.rs @@ -0,0 +1,260 @@ +use crate::{ + Chip, DecoratedIcon, DiffStat, IconDecoration, IconDecorationKind, SpinnerLabel, prelude::*, +}; +use gpui::{ClickEvent, SharedString}; + +#[derive(IntoElement, RegisterComponent)] +pub struct ThreadItem { + id: ElementId, + icon: IconName, + title: SharedString, + timestamp: SharedString, + running: bool, + generation_done: bool, + selected: bool, + added: Option, + removed: Option, + worktree: Option, + on_click: Option>, +} + +impl ThreadItem { + pub fn new(id: impl Into, title: impl Into) -> Self { + Self { + id: id.into(), + icon: IconName::ZedAgent, + title: title.into(), + timestamp: "".into(), + running: false, + generation_done: false, + selected: false, + added: None, + removed: None, + worktree: None, + on_click: None, + } + } + + pub fn timestamp(mut self, timestamp: impl Into) -> Self { + self.timestamp = timestamp.into(); + self + } + + pub fn icon(mut self, icon: IconName) -> Self { + self.icon = icon; + self + } + + pub fn running(mut self, running: bool) -> Self { + self.running = running; + self + } + + pub fn generation_done(mut self, generation_done: bool) -> 
Self { + self.generation_done = generation_done; + self + } + + pub fn selected(mut self, selected: bool) -> Self { + self.selected = selected; + self + } + + pub fn added(mut self, added: usize) -> Self { + self.added = Some(added); + self + } + + pub fn removed(mut self, removed: usize) -> Self { + self.removed = Some(removed); + self + } + + pub fn worktree(mut self, worktree: impl Into) -> Self { + self.worktree = Some(worktree.into()); + self + } + + pub fn on_click( + mut self, + handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static, + ) -> Self { + self.on_click = Some(Box::new(handler)); + self + } +} + +impl RenderOnce for ThreadItem { + fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { + let icon_container = || h_flex().size_4().justify_center(); + let agent_icon = Icon::new(self.icon) + .color(Color::Muted) + .size(IconSize::Small); + + let icon = if self.generation_done { + DecoratedIcon::new( + agent_icon, + Some( + IconDecoration::new( + IconDecorationKind::Dot, + cx.theme().colors().surface_background, + cx, + ) + .color(cx.theme().colors().text_accent) + .position(gpui::Point { + x: px(-2.), + y: px(-2.), + }), + ), + ) + .into_any_element() + } else { + agent_icon.into_any_element() + }; + + let has_no_changes = self.added.is_none() && self.removed.is_none(); + + v_flex() + .id(self.id.clone()) + .cursor_pointer() + .p_2() + .when(self.selected, |this| { + this.bg(cx.theme().colors().element_active) + }) + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .child( + h_flex() + .w_full() + .gap_1p5() + .child(icon) + .child(Label::new(self.title).truncate()) + .when(self.running, |this| { + this.child(icon_container().child(SpinnerLabel::new().color(Color::Accent))) + }), + ) + .child( + h_flex() + .gap_1p5() + .child(icon_container()) // Icon Spacing + .when_some(self.worktree, |this, name| { + this.child(Chip::new(name).label_size(LabelSize::XSmall)) + }) + .child( + Label::new(self.timestamp) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + Label::new("•") + .size(LabelSize::Small) + .color(Color::Muted) + .alpha(0.5), + ) + .when(has_no_changes, |this| { + this.child( + Label::new("No Changes") + .size(LabelSize::Small) + .color(Color::Muted), + ) + }) + .when(self.added.is_some() || self.removed.is_some(), |this| { + this.child(DiffStat::new( + self.id, + self.added.unwrap_or(0), + self.removed.unwrap_or(0), + )) + }), + ) + .when_some(self.on_click, |this, on_click| this.on_click(on_click)) + } +} + +impl Component for ThreadItem { + fn scope() -> ComponentScope { + ComponentScope::Agent + } + + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let container = || { + v_flex() + .w_72() + .border_1() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().panel_background) + }; + + let thread_item_examples = vec![ + single_example( + "Default", + container() + .child( + ThreadItem::new("ti-1", "Linking to the Agent Panel Depending on Settings") + .icon(IconName::AiOpenAi) + .timestamp("1:33 AM"), + ) + .into_any_element(), + ), + single_example( + "Generation Done", + container() + .child( + ThreadItem::new("ti-2", "Refine thread view scrolling behavior") + .timestamp("12:12 AM") + .generation_done(true), + ) + .into_any_element(), + ), + single_example( + "Running Agent", + container() + .child( + ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock") + .icon(IconName::AiClaude) + .timestamp("7:30 PM") + .running(true), + ) + .into_any_element(), + ), + single_example( + "In 
Worktree", + container() + .child( + ThreadItem::new("ti-4", "Add line numbers option to FileEditBlock") + .icon(IconName::AiClaude) + .timestamp("7:37 PM") + .worktree("link-agent-panel"), + ) + .into_any_element(), + ), + single_example( + "With Changes", + container() + .child( + ThreadItem::new("ti-5", "Managing user and project settings interactions") + .icon(IconName::AiClaude) + .timestamp("7:37 PM") + .added(10) + .removed(3), + ) + .into_any_element(), + ), + single_example( + "Selected Item", + container() + .child( + ThreadItem::new("ti-6", "Refine textarea interaction behavior") + .icon(IconName::AiGemini) + .timestamp("3:00 PM") + .selected(true), + ) + .into_any_element(), + ), + ]; + + Some( + example_group(thread_item_examples) + .vertical() + .into_any_element(), + ) + } +} diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs index ab66b71996d6c7b64d0d3867ab73bd9727816316..86ff1d8eff8691a2610a4a7e2268aaf47502e306 100644 --- a/crates/ui/src/components/toggle.rs +++ b/crates/ui/src/components/toggle.rs @@ -1,7 +1,8 @@ use gpui::{ - AnyElement, AnyView, ClickEvent, ElementId, Hsla, IntoElement, Styled, Window, div, hsla, - prelude::*, + AnyElement, AnyView, ClickEvent, ElementId, Hsla, IntoElement, KeybindingKeystroke, Keystroke, + Styled, Window, div, hsla, prelude::*, }; +use settings::KeybindSource; use std::{rc::Rc, sync::Arc}; use crate::utils::is_light; @@ -43,13 +44,16 @@ pub enum ToggleStyle { pub struct Checkbox { id: ElementId, toggle_state: ToggleState, + style: ToggleStyle, disabled: bool, placeholder: bool, - on_click: Option>, filled: bool, - style: ToggleStyle, - tooltip: Option AnyView>>, + visualization: bool, label: Option, + label_size: LabelSize, + label_color: Color, + tooltip: Option AnyView>>, + on_click: Option>, } impl Checkbox { @@ -58,13 +62,16 @@ impl Checkbox { Self { id: id.into(), toggle_state: checked, + style: ToggleStyle::default(), disabled: false, - on_click: None, + placeholder: false, filled: false, - style: ToggleStyle::default(), - tooltip: None, + visualization: false, label: None, - placeholder: false, + label_size: LabelSize::Default, + label_color: Color::Muted, + tooltip: None, + on_click: None, } } @@ -105,6 +112,13 @@ impl Checkbox { self } + /// Makes the checkbox look enabled but without pointer cursor and hover styles. + /// Primarily used for uninteractive markdown previews. + pub fn visualization_only(mut self, visualization: bool) -> Self { + self.visualization = visualization; + self + } + /// Sets the style of the checkbox using the specified [`ToggleStyle`]. 
pub fn style(mut self, style: ToggleStyle) -> Self { self.style = style; @@ -128,6 +142,16 @@ impl Checkbox { self.label = Some(label.into()); self } + + pub fn label_size(mut self, size: LabelSize) -> Self { + self.label_size = size; + self + } + + pub fn label_color(mut self, color: Color) -> Self { + self.label_color = color; + self + } } impl Checkbox { @@ -155,7 +179,6 @@ impl Checkbox { } } - /// container size pub fn container_size() -> Pixels { px(20.0) } @@ -169,6 +192,7 @@ impl RenderOnce for Checkbox { } else { Color::Selected }; + let icon = match self.toggle_state { ToggleState::Selected => { if self.placeholder { @@ -194,11 +218,10 @@ impl RenderOnce for Checkbox { let size = Self::container_size(); let checkbox = h_flex() + .group(group_id.clone()) .id(self.id.clone()) - .justify_center() - .items_center() .size(size) - .group(group_id.clone()) + .justify_center() .child( div() .flex() @@ -215,7 +238,7 @@ impl RenderOnce for Checkbox { .when(self.disabled, |this| { this.bg(cx.theme().colors().element_disabled.opacity(0.6)) }) - .when(!self.disabled, |this| { + .when(!self.disabled && !self.visualization, |this| { this.group_hover(group_id.clone(), |el| el.border_color(hover_border_color)) }) .when(self.placeholder, |this| { @@ -232,8 +255,27 @@ impl RenderOnce for Checkbox { h_flex() .id(self.id) + .map(|this| { + if self.disabled { + this.cursor_not_allowed() + } else if self.visualization { + this.cursor_default() + } else { + this.cursor_pointer() + } + }) .gap(DynamicSpacing::Base06.rems(cx)) .child(checkbox) + .when_some(self.label, |this, label| { + this.child( + Label::new(label) + .color(self.label_color) + .size(self.label_size), + ) + }) + .when_some(self.tooltip, |this, tooltip| { + this.tooltip(move |window, cx| tooltip(window, cx)) + }) .when_some( self.on_click.filter(|_| !self.disabled), |this, on_click| { @@ -242,111 +284,6 @@ impl RenderOnce for Checkbox { }) }, ) - // TODO: Allow label size to be different from default. - // TODO: Allow label color to be different from muted. - .when_some(self.label, |this, label| { - this.child(Label::new(label).color(Color::Muted)) - }) - .when_some(self.tooltip, |this, tooltip| { - this.tooltip(move |window, cx| tooltip(window, cx)) - }) - } -} - -/// A [`Checkbox`] that has a [`Label`]. -#[derive(IntoElement, RegisterComponent)] -pub struct CheckboxWithLabel { - id: ElementId, - label: Label, - checked: ToggleState, - on_click: Arc, - filled: bool, - style: ToggleStyle, - checkbox_position: IconPosition, -} - -// TODO: Remove `CheckboxWithLabel` now that `label` is a method of `Checkbox`. -impl CheckboxWithLabel { - /// Creates a checkbox with an attached label. - pub fn new( - id: impl Into, - label: Label, - checked: ToggleState, - on_click: impl Fn(&ToggleState, &mut Window, &mut App) + 'static, - ) -> Self { - Self { - id: id.into(), - label, - checked, - on_click: Arc::new(on_click), - filled: false, - style: ToggleStyle::default(), - checkbox_position: IconPosition::Start, - } - } - - /// Sets the style of the checkbox using the specified [`ToggleStyle`]. - pub fn style(mut self, style: ToggleStyle) -> Self { - self.style = style; - self - } - - /// Match the style of the checkbox to the current elevation using [`ToggleStyle::ElevationBased`]. - pub fn elevation(mut self, elevation: ElevationIndex) -> Self { - self.style = ToggleStyle::ElevationBased(elevation); - self - } - - /// Sets the `fill` setting of the checkbox, indicating whether it should be filled. 
- pub fn fill(mut self) -> Self { - self.filled = true; - self - } - - pub fn checkbox_position(mut self, position: IconPosition) -> Self { - self.checkbox_position = position; - self - } -} - -impl RenderOnce for CheckboxWithLabel { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - h_flex() - .gap(DynamicSpacing::Base08.rems(cx)) - .when(self.checkbox_position == IconPosition::Start, |this| { - this.child( - Checkbox::new(self.id.clone(), self.checked) - .style(self.style.clone()) - .when(self.filled, Checkbox::fill) - .on_click({ - let on_click = self.on_click.clone(); - move |checked, window, cx| { - (on_click)(checked, window, cx); - } - }), - ) - }) - .child( - div() - .id(SharedString::from(format!("{}-label", self.id))) - .on_click({ - let on_click = self.on_click.clone(); - move |_event, window, cx| { - (on_click)(&self.checked.inverse(), window, cx); - } - }) - .child(self.label), - ) - .when(self.checkbox_position == IconPosition::End, |this| { - this.child( - Checkbox::new(self.id.clone(), self.checked) - .style(self.style) - .when(self.filled, Checkbox::fill) - .on_click(move |checked, window, cx| { - (self.on_click)(checked, window, cx); - }), - ) - }) } } @@ -354,11 +291,7 @@ impl RenderOnce for CheckboxWithLabel { #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)] pub enum SwitchColor { #[default] - Default, Accent, - Error, - Warning, - Success, Custom(Hsla), } @@ -372,27 +305,10 @@ impl SwitchColor { } match self { - SwitchColor::Default => { - let colors = cx.theme().colors(); - let base_color = colors.text; - let bg_color = colors.element_background.blend(base_color.opacity(0.08)); - (bg_color, colors.border_variant) - } SwitchColor::Accent => { let status = cx.theme().status(); - (status.info.opacity(0.4), status.info.opacity(0.2)) - } - SwitchColor::Error => { - let status = cx.theme().status(); - (status.error.opacity(0.4), status.error.opacity(0.2)) - } - SwitchColor::Warning => { - let status = cx.theme().status(); - (status.warning.opacity(0.4), status.warning.opacity(0.2)) - } - SwitchColor::Success => { - let status = cx.theme().status(); - (status.success.opacity(0.4), status.success.opacity(0.2)) + let colors = cx.theme().colors(); + (status.info.opacity(0.4), colors.text_accent.opacity(0.2)) } SwitchColor::Custom(color) => (*color, color.opacity(0.6)), } @@ -402,16 +318,20 @@ impl SwitchColor { impl From for Color { fn from(color: SwitchColor) -> Self { match color { - SwitchColor::Default => Color::Default, SwitchColor::Accent => Color::Accent, - SwitchColor::Error => Color::Error, - SwitchColor::Warning => Color::Warning, - SwitchColor::Success => Color::Success, SwitchColor::Custom(_) => Color::Default, } } } +/// Defines the color for a switch component. +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)] +pub enum SwitchLabelPosition { + Start, + #[default] + End, +} + /// # Switch /// /// Switches are used to represent opposite states, such as enabled or disabled. 
@@ -422,6 +342,9 @@ pub struct Switch { disabled: bool, on_click: Option>, label: Option, + label_position: Option, + label_size: LabelSize, + full_width: bool, key_binding: Option, color: SwitchColor, tab_index: Option, @@ -436,6 +359,9 @@ impl Switch { disabled: false, on_click: None, label: None, + label_position: None, + label_size: LabelSize::Small, + full_width: false, key_binding: None, color: SwitchColor::default(), tab_index: None, @@ -469,6 +395,24 @@ impl Switch { self } + pub fn label_position( + mut self, + label_position: impl Into>, + ) -> Self { + self.label_position = label_position.into(); + self + } + + pub fn label_size(mut self, size: LabelSize) -> Self { + self.label_size = size; + self + } + + pub fn full_width(mut self, full_width: bool) -> Self { + self.full_width = full_width; + self + } + /// Display the keybinding that triggers the switch action. pub fn key_binding(mut self, key_binding: impl Into>) -> Self { self.key_binding = key_binding.into(); @@ -503,6 +447,7 @@ impl RenderOnce for Switch { }; let group_id = format!("switch_group_{:?}", self.id); + let label = self.label; let switch = div() .id((self.id.clone(), "switch")) @@ -555,9 +500,27 @@ impl RenderOnce for Switch { h_flex() .id(self.id) - .gap(DynamicSpacing::Base06.rems(cx)) .cursor_pointer() + .gap(DynamicSpacing::Base06.rems(cx)) + .when(self.full_width, |this| this.w_full().justify_between()) + .when( + self.label_position == Some(SwitchLabelPosition::Start), + |this| { + this.when_some(label.clone(), |this, label| { + this.child(Label::new(label).size(self.label_size)) + }) + }, + ) .child(switch) + .when( + self.label_position == Some(SwitchLabelPosition::End), + |this| { + this.when_some(label, |this, label| { + this.child(Label::new(label).size(self.label_size)) + }) + }, + ) + .children(self.key_binding) .when_some( self.on_click.filter(|_| !self.disabled), |this, on_click| { @@ -566,10 +529,6 @@ impl RenderOnce for Switch { }) }, ) - .when_some(self.label, |this, label| { - this.child(Label::new(label).size(LabelSize::Small)) - }) - .children(self.key_binding) } } @@ -965,6 +924,15 @@ impl Component for Checkbox { .into_any_element(), )], ), + example_group_with_title( + "Extra", + vec![single_example( + "Visualization-Only", + Checkbox::new("viz_only", ToggleState::Selected) + .visualization_only(true) + .into_any_element(), + )], + ), ]) .into_any_element(), ) @@ -1006,37 +974,8 @@ impl Component for Switch { "Colors", vec![ single_example( - "Default", - Switch::new("switch_default_style", ToggleState::Selected) - .color(SwitchColor::Default) - .on_click(|_, _, _cx| {}) - .into_any_element(), - ), - single_example( - "Accent", + "Accent (Default)", Switch::new("switch_accent_style", ToggleState::Selected) - .color(SwitchColor::Accent) - .on_click(|_, _, _cx| {}) - .into_any_element(), - ), - single_example( - "Error", - Switch::new("switch_error_style", ToggleState::Selected) - .color(SwitchColor::Error) - .on_click(|_, _, _cx| {}) - .into_any_element(), - ), - single_example( - "Warning", - Switch::new("switch_warning_style", ToggleState::Selected) - .color(SwitchColor::Warning) - .on_click(|_, _, _cx| {}) - .into_any_element(), - ), - single_example( - "Success", - Switch::new("switch_success_style", ToggleState::Selected) - .color(SwitchColor::Success) .on_click(|_, _, _cx| {}) .into_any_element(), ), @@ -1070,75 +1009,55 @@ impl Component for Switch { "With Label", vec![ single_example( - "Label", - Switch::new("switch_with_label", ToggleState::Selected) + "Start Label", + 
Switch::new("switch_with_label_start", ToggleState::Selected) + .label("Always save on quit") + .label_position(SwitchLabelPosition::Start) + .into_any_element(), + ), + single_example( + "End Label", + Switch::new("switch_with_label_end", ToggleState::Selected) + .label("Always save on quit") + .label_position(SwitchLabelPosition::End) + .into_any_element(), + ), + single_example( + "Default Size Label", + Switch::new( + "switch_with_label_default_size", + ToggleState::Selected, + ) + .label("Always save on quit") + .label_size(LabelSize::Default) + .into_any_element(), + ), + single_example( + "Small Size Label", + Switch::new("switch_with_label_small_size", ToggleState::Selected) .label("Always save on quit") + .label_size(LabelSize::Small) .into_any_element(), ), - // TODO: Where did theme_preview_keybinding go? - // single_example( - // "Keybinding", - // Switch::new("switch_with_keybinding", ToggleState::Selected) - // .key_binding(theme_preview_keybinding("cmd-shift-e")) - // .into_any_element(), - // ), ], ), + example_group_with_title( + "With Keybinding", + vec![single_example( + "Keybinding", + Switch::new("switch_with_keybinding", ToggleState::Selected) + .key_binding(Some(KeyBinding::from_keystrokes( + vec![KeybindingKeystroke::from_keystroke( + Keystroke::parse("cmd-s").unwrap(), + )] + .into(), + KeybindSource::Base, + ))) + .into_any_element(), + )], + ), ]) .into_any_element(), ) } } - -impl Component for CheckboxWithLabel { - fn scope() -> ComponentScope { - ComponentScope::Input - } - - fn description() -> Option<&'static str> { - Some("A checkbox component with an attached label") - } - - fn preview(_window: &mut Window, _cx: &mut App) -> Option { - Some( - v_flex() - .gap_6() - .children(vec![example_group_with_title( - "States", - vec![ - single_example( - "Unselected", - CheckboxWithLabel::new( - "checkbox_with_label_unselected", - Label::new("Always save on quit"), - ToggleState::Unselected, - |_, _, _| {}, - ) - .into_any_element(), - ), - single_example( - "Indeterminate", - CheckboxWithLabel::new( - "checkbox_with_label_indeterminate", - Label::new("Always save on quit"), - ToggleState::Indeterminate, - |_, _, _| {}, - ) - .into_any_element(), - ), - single_example( - "Selected", - CheckboxWithLabel::new( - "checkbox_with_label_selected", - Label::new("Always save on quit"), - ToggleState::Selected, - |_, _, _| {}, - ) - .into_any_element(), - ), - ], - )]) - .into_any_element(), - ) - } -} diff --git a/crates/ui/src/traits/styled_ext.rs b/crates/ui/src/traits/styled_ext.rs index cf452a2826e75bd88910b605a90fe34aa0ea62bd..849e56a024ac1da03bb7a9de8dd574ea53f39627 100644 --- a/crates/ui/src/traits/styled_ext.rs +++ b/crates/ui/src/traits/styled_ext.rs @@ -18,7 +18,11 @@ fn elevated_borderless(this: E, cx: &mut App, index: ElevationIndex) } /// Extends [`gpui::Styled`] with Zed-specific styling methods. -#[cfg_attr(debug_assertions, gpui_macros::derive_inspector_reflection)] +// gate on rust-analyzer so rust-analyzer never needs to expand this macro, it takes up to 10 seconds to expand due to inefficiencies in rust-analyzers proc-macro srv +#[cfg_attr( + all(debug_assertions, not(rust_analyzer)), + gpui_macros::derive_inspector_reflection +)] pub trait StyledExt: Styled + Sized { /// Horizontally stacks elements. 
/// diff --git a/crates/ui_input/src/input_field.rs b/crates/ui_input/src/input_field.rs index 82f7f0261facef8a7c6a422b2ff4ed335229aeb3..2bae8c172dcecbc94aa591297831c4f43279197b 100644 --- a/crates/ui_input/src/input_field.rs +++ b/crates/ui_input/src/input_field.rs @@ -37,6 +37,10 @@ pub struct InputField { disabled: bool, /// The minimum width of for the input min_width: Length, + /// The tab index for keyboard navigation order. + tab_index: Option, + /// Whether this field is a tab stop (can be focused via Tab key). + tab_stop: bool, } impl Focusable for InputField { @@ -63,6 +67,8 @@ impl InputField { start_icon: None, disabled: false, min_width: px(192.).into(), + tab_index: None, + tab_stop: true, } } @@ -86,6 +92,16 @@ impl InputField { self } + pub fn tab_index(mut self, index: isize) -> Self { + self.tab_index = Some(index); + self + } + + pub fn tab_stop(mut self, tab_stop: bool) -> Self { + self.tab_stop = tab_stop; + self + } + pub fn set_disabled(&mut self, disabled: bool, cx: &mut Context) { self.disabled = disabled; self.editor @@ -104,6 +120,11 @@ impl InputField { self.editor().read(cx).text(cx) } + pub fn clear(&self, window: &mut Window, cx: &mut App) { + self.editor() + .update(cx, |editor, cx| editor.clear(window, cx)) + } + pub fn set_text(&self, text: impl Into>, window: &mut Window, cx: &mut App) { self.editor() .update(cx, |editor, cx| editor.set_text(text, window, cx)) @@ -111,7 +132,8 @@ impl InputField { } impl Render for InputField { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let editor = self.editor.clone(); let settings = ThemeSettings::get_global(cx); let theme_color = cx.theme().colors(); @@ -151,6 +173,16 @@ impl Render for InputField { ..Default::default() }; + let focus_handle = self.editor.focus_handle(cx); + + let configured_handle = if let Some(tab_index) = self.tab_index { + focus_handle.tab_index(tab_index).tab_stop(self.tab_stop) + } else if !self.tab_stop { + focus_handle.tab_stop(false) + } else { + focus_handle + }; + v_flex() .id(self.placeholder.clone()) .w_full() @@ -168,6 +200,7 @@ impl Render for InputField { }) .child( h_flex() + .track_focus(&configured_handle) .min_w(self.min_width) .min_h_8() .w_full() @@ -179,6 +212,10 @@ impl Render for InputField { .bg(style.background_color) .border_1() .border_color(style.border_color) + .when( + editor.focus_handle(cx).contains_focused(window, cx), + |this| this.border_color(theme_color.border_focused), + ) .when_some(self.start_icon, |this, icon| { this.gap_1() .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) diff --git a/crates/ui_input/src/number_field.rs b/crates/ui_input/src/number_field.rs index ee5c57b43b7c44db1c2ded122d3d4272a541c32e..2d596a2498f445f6a0d18ce48b02bddf20aee8da 100644 --- a/crates/ui_input/src/number_field.rs +++ b/crates/ui_input/src/number_field.rs @@ -476,7 +476,7 @@ impl RenderOnce for NumberField { if let Some(previous) = previous_focus_handle.as_ref() { - window.focus(previous); + window.focus(previous, cx); } on_change(&new_value, window, cx); }; @@ -485,7 +485,7 @@ impl RenderOnce for NumberField { }) .detach(); - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); editor } diff --git a/crates/util/src/archive.rs b/crates/util/src/archive.rs index 9b58b16bedb2114503a3d87756ae4b2c4d460190..5a5dc777722c67d3e5bb96ed7115ccd2a71b8cbe 100644 --- a/crates/util/src/archive.rs +++ 
b/crates/util/src/archive.rs @@ -169,6 +169,7 @@ mod tests { writer.close().await?; out.flush().await?; + out.sync_all().await?; Ok(()) } diff --git a/crates/util/src/command.rs b/crates/util/src/command.rs index dde1603dfe29df0315caf6d99f1d9e1d03b131c1..40f1ec323f6dd799bdda07da2540741d46c99cea 100644 --- a/crates/util/src/command.rs +++ b/crates/util/src/command.rs @@ -58,7 +58,7 @@ pub fn new_smol_command(program: impl AsRef) -> smol::process::Command { } #[cfg(target_os = "macos")] -fn reset_exception_ports() { +pub fn reset_exception_ports() { use mach2::exception_types::{ EXC_MASK_ALL, EXCEPTION_DEFAULT, exception_behavior_t, exception_mask_t, }; diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 015711fa2a17a12f7e47e37c5bee3a6941a29691..a54f91c7a0392748cb64c984559cf1ce25c2a7d8 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -1,8 +1,9 @@ use anyhow::Context; -use globset::{Glob, GlobSet, GlobSetBuilder}; +use globset::{GlobBuilder, GlobSet, GlobSetBuilder}; use itertools::Itertools; use regex::Regex; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; use std::cmp::Ordering; use std::error::Error; use std::fmt::{Display, Formatter}; @@ -15,6 +16,7 @@ use std::{ sync::LazyLock, }; +use crate::rel_path::RelPathBuf; use crate::{rel_path::RelPath, shell::ShellKind}; static HOME_DIR: OnceLock = OnceLock::new(); @@ -225,9 +227,16 @@ impl SanitizedPath { #[cfg(not(target_os = "windows"))] return unsafe { mem::transmute::, Arc>(path) }; - // TODO: could avoid allocating here if dunce::simplified results in the same path #[cfg(target_os = "windows")] - return Self::new(&path).into(); + { + let simplified = dunce::simplified(path.as_ref()); + if simplified == path.as_ref() { + // safe because `Path` and `SanitizedPath` have the same repr and Drop impl + unsafe { mem::transmute::, Arc>(path) } + } else { + Self::unchecked_new(simplified).into() + } + } } pub fn new_arc + ?Sized>(path: &T) -> Arc { @@ -331,17 +340,35 @@ impl PathStyle { } #[inline] - pub fn separator(&self) -> &'static str { + pub fn primary_separator(&self) -> &'static str { match self { PathStyle::Posix => "/", PathStyle::Windows => "\\", } } + pub fn separators(&self) -> &'static [&'static str] { + match self { + PathStyle::Posix => &["/"], + PathStyle::Windows => &["\\", "/"], + } + } + + pub fn separators_ch(&self) -> &'static [char] { + match self { + PathStyle::Posix => &['/'], + PathStyle::Windows => &['\\', '/'], + } + } + pub fn is_windows(&self) -> bool { *self == PathStyle::Windows } + pub fn is_posix(&self) -> bool { + *self == PathStyle::Posix + } + pub fn join(self, left: impl AsRef, right: impl AsRef) -> Option { let right = right.as_ref().to_str()?; if is_absolute(right, self) { @@ -353,25 +380,54 @@ impl PathStyle { } else { Some(format!( "{left}{}{right}", - if left.ends_with(self.separator()) { + if left.ends_with(self.primary_separator()) { "" } else { - self.separator() + self.primary_separator() } )) } } pub fn split(self, path_like: &str) -> (Option<&str>, &str) { - let Some(pos) = path_like.rfind(self.separator()) else { + let Some(pos) = path_like.rfind(self.primary_separator()) else { return (None, path_like); }; - let filename_start = pos + self.separator().len(); + let filename_start = pos + self.primary_separator().len(); ( Some(&path_like[..filename_start]), &path_like[filename_start..], ) } + + pub fn strip_prefix<'a>( + &self, + child: &'a Path, + parent: &'a Path, + ) -> Option> { + let parent = parent.to_str()?; + if parent.is_empty() { + 
return RelPath::new(child, *self).ok(); + } + let parent = self + .separators() + .iter() + .find_map(|sep| parent.strip_suffix(sep)) + .unwrap_or(parent); + let child = child.to_str()?; + let stripped = child.strip_prefix(parent)?; + if let Some(relative) = self + .separators() + .iter() + .find_map(|sep| stripped.strip_prefix(sep)) + { + RelPath::new(relative.as_ref(), *self).ok() + } else if stripped.is_empty() { + Some(Cow::Borrowed(RelPath::empty())) + } else { + None + } + } } #[derive(Debug, Clone)] @@ -625,7 +681,14 @@ impl PathWithPosition { pub fn parse_str(s: &str) -> Self { let trimmed = s.trim(); let path = Path::new(trimmed); - let maybe_file_name_with_row_col = path.file_name().unwrap_or_default().to_string_lossy(); + let Some(maybe_file_name_with_row_col) = path.file_name().unwrap_or_default().to_str() + else { + return Self { + path: Path::new(s).to_path_buf(), + row: None, + column: None, + }; + }; if maybe_file_name_with_row_col.is_empty() { return Self { path: Path::new(s).to_path_buf(), @@ -640,15 +703,15 @@ impl PathWithPosition { static SUFFIX_RE: LazyLock = LazyLock::new(|| Regex::new(ROW_COL_CAPTURE_REGEX).unwrap()); match SUFFIX_RE - .captures(&maybe_file_name_with_row_col) + .captures(maybe_file_name_with_row_col) .map(|caps| caps.extract()) { Some((_, [file_name, maybe_row, maybe_column])) => { let row = maybe_row.parse::().ok(); let column = maybe_column.parse::().ok(); - let suffix_length = maybe_file_name_with_row_col.len() - file_name.len(); - let path_without_suffix = &trimmed[..trimmed.len() - suffix_length]; + let (_, suffix) = trimmed.split_once(file_name).unwrap(); + let path_without_suffix = &trimmed[..trimmed.len() - suffix.len()]; Self { path: Path::new(path_without_suffix).to_path_buf(), @@ -725,17 +788,11 @@ impl PathWithPosition { #[derive(Clone, Debug)] pub struct PathMatcher { - sources: Vec, + sources: Vec<(String, RelPathBuf, /*trailing separator*/ bool)>, glob: GlobSet, path_style: PathStyle, } -// impl std::fmt::Display for PathMatcher { -// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { -// self.sources.fmt(f) -// } -// } - impl PartialEq for PathMatcher { fn eq(&self, other: &Self) -> bool { self.sources.eq(&other.sources) @@ -751,9 +808,25 @@ impl PathMatcher { ) -> Result { let globs = globs .into_iter() - .map(|as_str| Glob::new(as_str.as_ref())) + .map(|as_str| { + GlobBuilder::new(as_str.as_ref()) + .backslash_escape(path_style.is_posix()) + .build() + }) .collect::, _>>()?; - let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect(); + let sources = globs + .iter() + .filter_map(|glob| { + let glob = glob.glob(); + Some(( + glob.to_string(), + RelPath::new(&glob.as_ref(), path_style) + .ok() + .map(std::borrow::Cow::into_owned)?, + glob.ends_with(path_style.separators_ch()), + )) + }) + .collect(); let mut glob_builder = GlobSetBuilder::new(); for single_glob in globs { glob_builder.add(single_glob); @@ -766,27 +839,24 @@ impl PathMatcher { }) } - pub fn sources(&self) -> &[String] { - &self.sources + pub fn sources(&self) -> impl Iterator + Clone { + self.sources.iter().map(|(source, ..)| source.as_str()) } - pub fn is_match>(&self, other: P) -> bool { - let other_path = other.as_ref(); - self.sources.iter().any(|source| { - let as_bytes = other_path.as_os_str().as_encoded_bytes(); - as_bytes.starts_with(source.as_bytes()) || as_bytes.ends_with(source.as_bytes()) - }) || self.glob.is_match(other_path) - || self.check_with_end_separator(other_path) - } + pub fn is_match>(&self, other: P) -> bool { 
+ if self.sources.iter().any(|(_, source, _)| { + other.as_ref().starts_with(source) || other.as_ref().ends_with(source) + }) { + return true; + } + let other_path = other.as_ref().display(self.path_style); - fn check_with_end_separator(&self, path: &Path) -> bool { - let path_str = path.to_string_lossy(); - let separator = self.path_style.separator(); - if path_str.ends_with(separator) { - false - } else { - self.glob.is_match(path_str.to_string() + separator) + if self.glob.is_match(&*other_path) { + return true; } + + self.glob + .is_match(other_path.into_owned() + self.path_style.primary_separator()) } } @@ -800,22 +870,6 @@ impl Default for PathMatcher { } } -/// Custom character comparison that prioritizes lowercase for same letters -fn compare_chars(a: char, b: char) -> Ordering { - // First compare case-insensitive - match a.to_ascii_lowercase().cmp(&b.to_ascii_lowercase()) { - Ordering::Equal => { - // If same letter, prioritize lowercase (lowercase < uppercase) - match (a.is_ascii_lowercase(), b.is_ascii_lowercase()) { - (true, false) => Ordering::Less, // lowercase comes first - (false, true) => Ordering::Greater, // uppercase comes after - _ => Ordering::Equal, // both same case or both non-ascii - } - } - other => other, - } -} - /// Compares two sequences of consecutive digits for natural sorting. /// /// This function is a core component of natural sorting that handles numeric comparison @@ -916,21 +970,25 @@ where /// * Numbers are compared by numeric value, not character by character /// * Leading zeros affect ordering when numeric values are equal /// * Can handle numbers larger than u128::MAX (falls back to string comparison) +/// * When strings are equal case-insensitively, lowercase is prioritized (lowercase < uppercase) /// /// # Algorithm /// /// The function works by: -/// 1. Processing strings character by character +/// 1. Processing strings character by character in a case-insensitive manner /// 2. When encountering digits, treating consecutive digits as a single number /// 3. Comparing numbers by their numeric value rather than lexicographically -/// 4. For non-numeric characters, using case-sensitive comparison with lowercase priority +/// 4. For non-numeric characters, using case-insensitive comparison +/// 5. If everything is equal case-insensitively, using case-sensitive comparison as final tie-breaker pub fn natural_sort(a: &str, b: &str) -> Ordering { let mut a_iter = a.chars().peekable(); let mut b_iter = b.chars().peekable(); loop { match (a_iter.peek(), b_iter.peek()) { - (None, None) => return Ordering::Equal, + (None, None) => { + return b.cmp(a); + } (None, _) => return Ordering::Less, (_, None) => return Ordering::Greater, (Some(&a_char), Some(&b_char)) => { @@ -940,7 +998,10 @@ pub fn natural_sort(a: &str, b: &str) -> Ordering { ordering => return ordering, } } else { - match compare_chars(a_char, b_char) { + match a_char + .to_ascii_lowercase() + .cmp(&b_char.to_ascii_lowercase()) + { Ordering::Equal => { a_iter.next(); b_iter.next(); @@ -952,36 +1013,48 @@ pub fn natural_sort(a: &str, b: &str) -> Ordering { } } } + +/// Case-insensitive natural sort without applying the final lowercase/uppercase tie-breaker. +/// This is useful when comparing individual path components where we want to keep walking +/// deeper components before deciding on casing. 
+fn natural_sort_no_tiebreak(a: &str, b: &str) -> Ordering { + if a.eq_ignore_ascii_case(b) { + Ordering::Equal + } else { + natural_sort(a, b) + } +} + +fn stem_and_extension(filename: &str) -> (Option<&str>, Option<&str>) { + if filename.is_empty() { + return (None, None); + } + + match filename.rsplit_once('.') { + // Case 1: No dot was found. The entire name is the stem. + None => (Some(filename), None), + + // Case 2: A dot was found. + Some((before, after)) => { + // This is the crucial check for dotfiles like ".bashrc". + // If `before` is empty, the dot was the first character. + // In that case, we revert to the "whole name is the stem" logic. + if before.is_empty() { + (Some(filename), None) + } else { + // Otherwise, we have a standard stem and extension. + (Some(before), Some(after)) + } + } + } +} + pub fn compare_rel_paths( (path_a, a_is_file): (&RelPath, bool), (path_b, b_is_file): (&RelPath, bool), ) -> Ordering { let mut components_a = path_a.components(); let mut components_b = path_b.components(); - - fn stem_and_extension(filename: &str) -> (Option<&str>, Option<&str>) { - if filename.is_empty() { - return (None, None); - } - - match filename.rsplit_once('.') { - // Case 1: No dot was found. The entire name is the stem. - None => (Some(filename), None), - - // Case 2: A dot was found. - Some((before, after)) => { - // This is the crucial check for dotfiles like ".bashrc". - // If `before` is empty, the dot was the first character. - // In that case, we revert to the "whole name is the stem" logic. - if before.is_empty() { - (Some(filename), None) - } else { - // Otherwise, we have a standard stem and extension. - (Some(before), Some(after)) - } - } - } - } loop { match (components_a.next(), components_b.next()) { (Some(component_a), Some(component_b)) => { @@ -1028,6 +1101,156 @@ pub fn compare_rel_paths( } } +/// Compare two relative paths with mixed files and directories using +/// case-insensitive natural sorting. For example, "Apple", "aardvark.txt", +/// and "Zebra" would be sorted as: aardvark.txt, Apple, Zebra +/// (case-insensitive alphabetical). 
+pub fn compare_rel_paths_mixed( + (path_a, a_is_file): (&RelPath, bool), + (path_b, b_is_file): (&RelPath, bool), +) -> Ordering { + let original_paths_equal = std::ptr::eq(path_a, path_b) || path_a == path_b; + let mut components_a = path_a.components(); + let mut components_b = path_b.components(); + + loop { + match (components_a.next(), components_b.next()) { + (Some(component_a), Some(component_b)) => { + let a_leaf_file = a_is_file && components_a.rest().is_empty(); + let b_leaf_file = b_is_file && components_b.rest().is_empty(); + + let (a_stem, a_ext) = a_leaf_file + .then(|| stem_and_extension(component_a)) + .unwrap_or_default(); + let (b_stem, b_ext) = b_leaf_file + .then(|| stem_and_extension(component_b)) + .unwrap_or_default(); + let a_key = if a_leaf_file { + a_stem + } else { + Some(component_a) + }; + let b_key = if b_leaf_file { + b_stem + } else { + Some(component_b) + }; + + let ordering = match (a_key, b_key) { + (Some(a), Some(b)) => natural_sort_no_tiebreak(a, b) + .then_with(|| match (a_leaf_file, b_leaf_file) { + (true, false) if a == b => Ordering::Greater, + (false, true) if a == b => Ordering::Less, + _ => Ordering::Equal, + }) + .then_with(|| { + if a_leaf_file && b_leaf_file { + let a_ext_str = a_ext.unwrap_or_default().to_lowercase(); + let b_ext_str = b_ext.unwrap_or_default().to_lowercase(); + b_ext_str.cmp(&a_ext_str) + } else { + Ordering::Equal + } + }), + (Some(_), None) => Ordering::Greater, + (None, Some(_)) => Ordering::Less, + (None, None) => Ordering::Equal, + }; + + if !ordering.is_eq() { + return ordering; + } + } + (Some(_), None) => return Ordering::Greater, + (None, Some(_)) => return Ordering::Less, + (None, None) => { + // Deterministic tie-break: use natural sort to prefer lowercase when paths + // are otherwise equal but still differ in casing. + if !original_paths_equal { + return natural_sort(path_a.as_unix_str(), path_b.as_unix_str()); + } + return Ordering::Equal; + } + } + } +} + +/// Compare two relative paths with files before directories using +/// case-insensitive natural sorting. At each directory level, all files +/// are sorted before all directories, with case-insensitive alphabetical +/// ordering within each group. 
+pub fn compare_rel_paths_files_first( + (path_a, a_is_file): (&RelPath, bool), + (path_b, b_is_file): (&RelPath, bool), +) -> Ordering { + let original_paths_equal = std::ptr::eq(path_a, path_b) || path_a == path_b; + let mut components_a = path_a.components(); + let mut components_b = path_b.components(); + + loop { + match (components_a.next(), components_b.next()) { + (Some(component_a), Some(component_b)) => { + let a_leaf_file = a_is_file && components_a.rest().is_empty(); + let b_leaf_file = b_is_file && components_b.rest().is_empty(); + + let (a_stem, a_ext) = a_leaf_file + .then(|| stem_and_extension(component_a)) + .unwrap_or_default(); + let (b_stem, b_ext) = b_leaf_file + .then(|| stem_and_extension(component_b)) + .unwrap_or_default(); + let a_key = if a_leaf_file { + a_stem + } else { + Some(component_a) + }; + let b_key = if b_leaf_file { + b_stem + } else { + Some(component_b) + }; + + let ordering = match (a_key, b_key) { + (Some(a), Some(b)) => { + if a_leaf_file && !b_leaf_file { + Ordering::Less + } else if !a_leaf_file && b_leaf_file { + Ordering::Greater + } else { + natural_sort_no_tiebreak(a, b).then_with(|| { + if a_leaf_file && b_leaf_file { + let a_ext_str = a_ext.unwrap_or_default().to_lowercase(); + let b_ext_str = b_ext.unwrap_or_default().to_lowercase(); + a_ext_str.cmp(&b_ext_str) + } else { + Ordering::Equal + } + }) + } + } + (Some(_), None) => Ordering::Greater, + (None, Some(_)) => Ordering::Less, + (None, None) => Ordering::Equal, + }; + + if !ordering.is_eq() { + return ordering; + } + } + (Some(_), None) => return Ordering::Greater, + (None, Some(_)) => return Ordering::Less, + (None, None) => { + // Deterministic tie-break: use natural sort to prefer lowercase when paths + // are otherwise equal but still differ in casing. 
+ if !original_paths_equal { + return natural_sort(path_a.as_unix_str(), path_b.as_unix_str()); + } + return Ordering::Equal; + } + } + } +} + pub fn compare_paths( (path_a, a_is_file): (&Path, bool), (path_b, b_is_file): (&Path, bool), @@ -1151,6 +1374,8 @@ impl WslPath { #[cfg(test)] mod tests { + use crate::rel_path::rel_path; + use super::*; use util_macros::perf; @@ -1246,6 +1471,312 @@ mod tests { ); } + #[perf] + fn compare_paths_mixed_case_numeric_ordering() { + let mut entries = [ + (Path::new(".config"), false), + (Path::new("Dir1"), false), + (Path::new("dir01"), false), + (Path::new("dir2"), false), + (Path::new("Dir02"), false), + (Path::new("dir10"), false), + (Path::new("Dir10"), false), + ]; + + entries.sort_by(|&a, &b| compare_paths(a, b)); + + let ordered: Vec<&str> = entries + .iter() + .map(|(path, _)| path.to_str().unwrap()) + .collect(); + + assert_eq!( + ordered, + vec![ + ".config", "Dir1", "dir01", "dir2", "Dir02", "dir10", "Dir10" + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_case_insensitive() { + // Test that mixed mode is case-insensitive + let mut paths = vec![ + (RelPath::unix("zebra.txt").unwrap(), true), + (RelPath::unix("Apple").unwrap(), false), + (RelPath::unix("banana.rs").unwrap(), true), + (RelPath::unix("Carrot").unwrap(), false), + (RelPath::unix("aardvark.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + // Case-insensitive: aardvark < Apple < banana < Carrot < zebra + assert_eq!( + paths, + vec![ + (RelPath::unix("aardvark.txt").unwrap(), true), + (RelPath::unix("Apple").unwrap(), false), + (RelPath::unix("banana.rs").unwrap(), true), + (RelPath::unix("Carrot").unwrap(), false), + (RelPath::unix("zebra.txt").unwrap(), true), + ] + ); + } + + #[perf] + fn compare_rel_paths_files_first_basic() { + // Test that files come before directories + let mut paths = vec![ + (RelPath::unix("zebra.txt").unwrap(), true), + (RelPath::unix("Apple").unwrap(), false), + (RelPath::unix("banana.rs").unwrap(), true), + (RelPath::unix("Carrot").unwrap(), false), + (RelPath::unix("aardvark.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + // Files first (case-insensitive), then directories (case-insensitive) + assert_eq!( + paths, + vec![ + (RelPath::unix("aardvark.txt").unwrap(), true), + (RelPath::unix("banana.rs").unwrap(), true), + (RelPath::unix("zebra.txt").unwrap(), true), + (RelPath::unix("Apple").unwrap(), false), + (RelPath::unix("Carrot").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_files_first_case_insensitive() { + // Test case-insensitive sorting within files and directories + let mut paths = vec![ + (RelPath::unix("Zebra.txt").unwrap(), true), + (RelPath::unix("apple").unwrap(), false), + (RelPath::unix("Banana.rs").unwrap(), true), + (RelPath::unix("carrot").unwrap(), false), + (RelPath::unix("Aardvark.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("Aardvark.txt").unwrap(), true), + (RelPath::unix("Banana.rs").unwrap(), true), + (RelPath::unix("Zebra.txt").unwrap(), true), + (RelPath::unix("apple").unwrap(), false), + (RelPath::unix("carrot").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_files_first_numeric() { + // Test natural number sorting with files first + let mut paths = vec![ + (RelPath::unix("file10.txt").unwrap(), true), + (RelPath::unix("dir2").unwrap(), false), + (RelPath::unix("file2.txt").unwrap(), true), + 
(RelPath::unix("dir10").unwrap(), false), + (RelPath::unix("file1.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("file1.txt").unwrap(), true), + (RelPath::unix("file2.txt").unwrap(), true), + (RelPath::unix("file10.txt").unwrap(), true), + (RelPath::unix("dir2").unwrap(), false), + (RelPath::unix("dir10").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_case() { + // Test case-insensitive sorting with varied capitalization + let mut paths = vec![ + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix("readme.txt").unwrap(), true), + (RelPath::unix("ReadMe.rs").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + // All "readme" variants should group together, sorted by extension + assert_eq!( + paths, + vec![ + (RelPath::unix("readme.txt").unwrap(), true), + (RelPath::unix("ReadMe.rs").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_files_and_dirs() { + // Verify directories and files are still mixed + let mut paths = vec![ + (RelPath::unix("file2.txt").unwrap(), true), + (RelPath::unix("Dir1").unwrap(), false), + (RelPath::unix("file1.txt").unwrap(), true), + (RelPath::unix("dir2").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + // Case-insensitive: dir1, dir2, file1, file2 (all mixed) + assert_eq!( + paths, + vec![ + (RelPath::unix("Dir1").unwrap(), false), + (RelPath::unix("dir2").unwrap(), false), + (RelPath::unix("file1.txt").unwrap(), true), + (RelPath::unix("file2.txt").unwrap(), true), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_with_nested_paths() { + // Test that nested paths still work correctly + let mut paths = vec![ + (RelPath::unix("src/main.rs").unwrap(), true), + (RelPath::unix("Cargo.toml").unwrap(), true), + (RelPath::unix("src").unwrap(), false), + (RelPath::unix("target").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("Cargo.toml").unwrap(), true), + (RelPath::unix("src").unwrap(), false), + (RelPath::unix("src/main.rs").unwrap(), true), + (RelPath::unix("target").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_files_first_with_nested() { + // Files come before directories, even with nested paths + let mut paths = vec![ + (RelPath::unix("src/lib.rs").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix("src").unwrap(), false), + (RelPath::unix("tests").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix("src").unwrap(), false), + (RelPath::unix("src/lib.rs").unwrap(), true), + (RelPath::unix("tests").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_dotfiles() { + // Test that dotfiles are handled correctly in mixed mode + let mut paths = vec![ + (RelPath::unix(".gitignore").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix(".github").unwrap(), false), + (RelPath::unix("src").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix(".github").unwrap(), false), + (RelPath::unix(".gitignore").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix("src").unwrap(), false), + ] + ); + } + + #[perf] + fn 
compare_rel_paths_files_first_dotfiles() { + // Test that dotfiles come first when they're files + let mut paths = vec![ + (RelPath::unix(".gitignore").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix(".github").unwrap(), false), + (RelPath::unix("src").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix(".gitignore").unwrap(), true), + (RelPath::unix("README.md").unwrap(), true), + (RelPath::unix(".github").unwrap(), false), + (RelPath::unix("src").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_same_stem_different_extension() { + // Files with same stem but different extensions should sort by extension + let mut paths = vec![ + (RelPath::unix("file.rs").unwrap(), true), + (RelPath::unix("file.md").unwrap(), true), + (RelPath::unix("file.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("file.txt").unwrap(), true), + (RelPath::unix("file.rs").unwrap(), true), + (RelPath::unix("file.md").unwrap(), true), + ] + ); + } + + #[perf] + fn compare_rel_paths_files_first_same_stem() { + // Same stem files should still sort by extension with files_first + let mut paths = vec![ + (RelPath::unix("main.rs").unwrap(), true), + (RelPath::unix("main.c").unwrap(), true), + (RelPath::unix("main").unwrap(), false), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_files_first(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("main.c").unwrap(), true), + (RelPath::unix("main.rs").unwrap(), true), + (RelPath::unix("main").unwrap(), false), + ] + ); + } + + #[perf] + fn compare_rel_paths_mixed_deep_nesting() { + // Test sorting with deeply nested paths + let mut paths = vec![ + (RelPath::unix("a/b/c.txt").unwrap(), true), + (RelPath::unix("A/B.txt").unwrap(), true), + (RelPath::unix("a.txt").unwrap(), true), + (RelPath::unix("A.txt").unwrap(), true), + ]; + paths.sort_by(|&a, &b| compare_rel_paths_mixed(a, b)); + assert_eq!( + paths, + vec![ + (RelPath::unix("A/B.txt").unwrap(), true), + (RelPath::unix("a/b/c.txt").unwrap(), true), + (RelPath::unix("a.txt").unwrap(), true), + (RelPath::unix("A.txt").unwrap(), true), + ] + ); + } + #[perf] fn path_with_position_parse_posix_path() { // Test POSIX filename edge cases @@ -1563,42 +2094,41 @@ mod tests { } #[perf] - fn edge_of_glob() { - let path = Path::new("/work/node_modules"); - let path_matcher = - PathMatcher::new(&["**/node_modules/**".to_owned()], PathStyle::Posix).unwrap(); - assert!( - path_matcher.is_match(path), - "Path matcher should match {path:?}" - ); - } - - #[perf] - fn file_in_dirs() { - let path = Path::new("/work/.env"); - let path_matcher = PathMatcher::new(&["**/.env".to_owned()], PathStyle::Posix).unwrap(); - assert!( - path_matcher.is_match(path), - "Path matcher should match {path:?}" - ); - let path = Path::new("/work/package.json"); - assert!( - !path_matcher.is_match(path), - "Path matcher should not match {path:?}" - ); - } - - #[perf] - fn project_search() { - let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules"); - let path_matcher = - PathMatcher::new(&["**/node_modules/**".to_owned()], PathStyle::Posix).unwrap(); - assert!( - path_matcher.is_match(path), - "Path matcher should match {path:?}" - ); - } - + // fn edge_of_glob() { + // let path = Path::new("/work/node_modules"); + // let path_matcher = + // PathMatcher::new(&["**/node_modules/**".to_owned()], PathStyle::Posix).unwrap(); + // 
assert!( + // path_matcher.is_match(path), + // "Path matcher should match {path:?}" + // ); + // } + + // #[perf] + // fn file_in_dirs() { + // let path = Path::new("/work/.env"); + // let path_matcher = PathMatcher::new(&["**/.env".to_owned()], PathStyle::Posix).unwrap(); + // assert!( + // path_matcher.is_match(path), + // "Path matcher should match {path:?}" + // ); + // let path = Path::new("/work/package.json"); + // assert!( + // !path_matcher.is_match(path), + // "Path matcher should not match {path:?}" + // ); + // } + + // #[perf] + // fn project_search() { + // let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules"); + // let path_matcher = + // PathMatcher::new(&["**/node_modules/**".to_owned()], PathStyle::Posix).unwrap(); + // assert!( + // path_matcher.is_match(path), + // "Path matcher should match {path:?}" + // ); + // } #[perf] #[cfg(target_os = "windows")] fn test_sanitized_path() { @@ -1917,10 +2447,25 @@ mod tests { ), Ordering::Less ); + } - // Mixed case with numbers - assert_eq!(natural_sort("File1", "file2"), Ordering::Greater); + #[perf] + fn test_natural_sort_case_sensitive() { + // Numerically smaller values come first. + assert_eq!(natural_sort("File1", "file2"), Ordering::Less); assert_eq!(natural_sort("file1", "File2"), Ordering::Less); + + // Numerically equal values: the case-insensitive comparison decides first. + // Case-sensitive comparison only occurs when both are equal case-insensitively. + assert_eq!(natural_sort("Dir1", "dir01"), Ordering::Less); + assert_eq!(natural_sort("dir2", "Dir02"), Ordering::Less); + assert_eq!(natural_sort("dir2", "dir02"), Ordering::Less); + + // Numerically equal and case-insensitively equal: + // the lexicographically smaller (case-sensitive) one wins. + assert_eq!(natural_sort("dir1", "Dir1"), Ordering::Less); + assert_eq!(natural_sort("dir02", "Dir02"), Ordering::Less); + assert_eq!(natural_sort("dir10", "Dir10"), Ordering::Less); } #[perf] @@ -1999,6 +2544,89 @@ mod tests { assert_eq!(strip_path_suffix(base, suffix), None); } + #[test] + fn test_strip_prefix() { + let expected = [ + ( + PathStyle::Posix, + "/a/b/c", + "/a/b", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Posix, + "/a/b/c", + "/a/b/", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Posix, + "/a/b/c", + "/", + Some(rel_path("a/b/c").into_arc()), + ), + (PathStyle::Posix, "/a/b/c", "", None), + (PathStyle::Posix, "/a/b//c", "/a/b/", None), + (PathStyle::Posix, "/a/bc", "/a/b", None), + ( + PathStyle::Posix, + "/a/b/c", + "/a/b/c", + Some(rel_path("").into_arc()), + ), + ( + PathStyle::Windows, + "C:\\a\\b\\c", + "C:\\a\\b", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Windows, + "C:\\a\\b\\c", + "C:\\a\\b\\", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Windows, + "C:\\a\\b\\c", + "C:\\", + Some(rel_path("a/b/c").into_arc()), + ), + (PathStyle::Windows, "C:\\a\\b\\c", "", None), + (PathStyle::Windows, "C:\\a\\b\\\\c", "C:\\a\\b\\", None), + (PathStyle::Windows, "C:\\a\\bc", "C:\\a\\b", None), + ( + PathStyle::Windows, + "C:\\a\\b/c", + "C:\\a\\b", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Windows, + "C:\\a\\b/c", + "C:\\a\\b\\", + Some(rel_path("c").into_arc()), + ), + ( + PathStyle::Windows, + "C:\\a\\b/c", + "C:\\a\\b/", + Some(rel_path("c").into_arc()), + ), + ]; + let actual = expected.clone().map(|(style, child, parent, _)| { + ( + style, + child, + parent, + style + .strip_prefix(child.as_ref(), parent.as_ref()) + .map(|rel_path| rel_path.into_arc()), + ) + }); + 
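+        // Comparing the full (style, child, parent, result) tuples keeps the inputs of
+        // any failing case visible in the assertion diff.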
pretty_assertions::assert_eq!(actual, expected); + } + #[cfg(target_os = "windows")] #[test] fn test_wsl_path() { diff --git a/crates/util/src/rel_path.rs b/crates/util/src/rel_path.rs index b360297f209c54c6a33b174a738ed1876fbc16a0..5e20aacad5fe177cd1af65dc98aeb45565a3082e 100644 --- a/crates/util/src/rel_path.rs +++ b/crates/util/src/rel_path.rs @@ -27,7 +27,7 @@ pub struct RelPath(str); /// relative and normalized. /// /// This type is to [`RelPath`] as [`std::path::PathBuf`] is to [`std::path::Path`] -#[derive(Clone, Serialize, Deserialize)] +#[derive(PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct RelPathBuf(String); impl RelPath { @@ -161,7 +161,7 @@ impl RelPath { false } - pub fn strip_prefix<'a>(&'a self, other: &Self) -> Result<&'a Self> { + pub fn strip_prefix<'a>(&'a self, other: &Self) -> Result<&'a Self, StripPrefixError> { if other.is_empty() { return Ok(self); } @@ -172,7 +172,7 @@ impl RelPath { return Ok(Self::empty()); } } - Err(anyhow!("failed to strip prefix: {other:?} from {self:?}")) + Err(StripPrefixError) } pub fn len(&self) -> usize { @@ -228,7 +228,8 @@ impl RelPath { pub fn display(&self, style: PathStyle) -> Cow<'_, str> { match style { PathStyle::Posix => Cow::Borrowed(&self.0), - PathStyle::Windows => Cow::Owned(self.0.replace('/', "\\")), + PathStyle::Windows if self.0.contains('/') => Cow::Owned(self.0.replace('/', "\\")), + PathStyle::Windows => Cow::Borrowed(&self.0), } } @@ -250,6 +251,9 @@ impl RelPath { } } +#[derive(Debug)] +pub struct StripPrefixError; + impl ToOwned for RelPath { type Owned = RelPathBuf; @@ -341,6 +345,12 @@ impl AsRef for RelPathBuf { } } +impl AsRef for RelPath { + fn as_ref(&self) -> &RelPath { + self + } +} + impl Deref for RelPathBuf { type Target = RelPath; @@ -374,6 +384,7 @@ impl PartialEq for RelPath { } } +#[derive(Default)] pub struct RelPathComponents<'a>(&'a str); pub struct RelPathAncestors<'a>(Option<&'a str>); diff --git a/crates/util/src/schemars.rs b/crates/util/src/schemars.rs index 9314eda4ac4d5003d7186c3115137e2e54c66794..8124ca8cfef62cb4ea320da6423d7ad95a09eb78 100644 --- a/crates/util/src/schemars.rs +++ b/crates/util/src/schemars.rs @@ -53,3 +53,20 @@ impl schemars::transform::Transform for DefaultDenyUnknownFields { transform_subschemas(self, schema); } } + +/// Defaults `allowTrailingCommas` to `true`, for use with `json-language-server`. +/// This can be applied to any schema that will be treated as `jsonc`. +/// +/// Note that this is non-recursive and only applied to the root schema. +#[derive(Clone)] +pub struct AllowTrailingCommas; + +impl schemars::transform::Transform for AllowTrailingCommas { + fn transform(&mut self, schema: &mut schemars::Schema) { + if let Some(object) = schema.as_object_mut() + && !object.contains_key("allowTrailingCommas") + { + object.insert("allowTrailingCommas".to_string(), true.into()); + } + } +} diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index ba54f7b7784b45613b28067afe2748339e6b6c64..d51cb39aedd89908db9608f5961688d4b30afc9b 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -56,7 +56,10 @@ pub enum ShellKind { Tcsh, Rc, Fish, + /// Pre-installed "legacy" powershell for windows PowerShell, + /// PowerShell 7.x + Pwsh, Nushell, Cmd, Xonsh, @@ -79,29 +82,42 @@ pub fn get_default_system_shell() -> String { } } -/// Get the default system shell, preferring git-bash on Windows. +/// Get the default system shell, preferring bash on Windows. 
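+///
+/// On Windows the lookup order mirrors `get_windows_bash` below: a Scoop-managed
+/// `bash.exe` first, then the copy bundled with Git, then the regular system shell;
+/// other platforms always get `/bin/sh`.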
pub fn get_default_system_shell_preferring_bash() -> String { if cfg!(windows) { - get_windows_git_bash().unwrap_or_else(|| get_windows_system_shell()) + get_windows_bash().unwrap_or_else(|| get_windows_system_shell()) } else { "/bin/sh".to_string() } } -pub fn get_windows_git_bash() -> Option { - static GIT_BASH: LazyLock> = LazyLock::new(|| { +pub fn get_windows_bash() -> Option { + use std::path::PathBuf; + + fn find_bash_in_scoop() -> Option { + let bash_exe = + PathBuf::from(std::env::var_os("USERPROFILE")?).join("scoop\\shims\\bash.exe"); + bash_exe.exists().then_some(bash_exe) + } + + fn find_bash_in_git() -> Option { // /path/to/git/cmd/git.exe/../../bin/bash.exe let git = which::which("git").ok()?; let git_bash = git.parent()?.parent()?.join("bin").join("bash.exe"); - if git_bash.is_file() { - log::info!("Found git-bash at {}", git_bash.display()); - Some(git_bash.to_string_lossy().to_string()) - } else { - None + git_bash.exists().then_some(git_bash) + } + + static BASH: LazyLock> = LazyLock::new(|| { + let bash = find_bash_in_scoop() + .or_else(|| find_bash_in_git()) + .map(|p| p.to_string_lossy().into_owned()); + if let Some(ref path) = bash { + log::info!("Found bash at {}", path); } + bash }); - (*GIT_BASH).clone() + (*BASH).clone() } pub fn get_windows_system_shell() -> String { @@ -191,14 +207,22 @@ pub fn get_windows_system_shell() -> String { } static SYSTEM_SHELL: LazyLock = LazyLock::new(|| { - find_pwsh_in_programfiles(false, false) - .or_else(|| find_pwsh_in_programfiles(true, false)) - .or_else(|| find_pwsh_in_msix(false)) - .or_else(|| find_pwsh_in_programfiles(false, true)) - .or_else(|| find_pwsh_in_msix(true)) - .or_else(|| find_pwsh_in_programfiles(true, true)) - .or_else(find_pwsh_in_scoop) - .map(|p| p.to_string_lossy().into_owned()) + let locations = [ + || find_pwsh_in_programfiles(false, false), + || find_pwsh_in_programfiles(true, false), + || find_pwsh_in_msix(false), + || find_pwsh_in_programfiles(false, true), + || find_pwsh_in_msix(true), + || find_pwsh_in_programfiles(true, true), + || find_pwsh_in_scoop(), + || which::which_global("pwsh.exe").ok(), + || which::which_global("powershell.exe").ok(), + ]; + + locations + .into_iter() + .find_map(|f| f()) + .map(|p| p.to_string_lossy().trim().to_owned()) .inspect(|shell| log::info!("Found powershell in: {}", shell)) .unwrap_or_else(|| { log::warn!("Powershell not found, falling back to `cmd`"); @@ -217,6 +241,7 @@ impl fmt::Display for ShellKind { ShellKind::Tcsh => write!(f, "tcsh"), ShellKind::Fish => write!(f, "fish"), ShellKind::PowerShell => write!(f, "powershell"), + ShellKind::Pwsh => write!(f, "pwsh"), ShellKind::Nushell => write!(f, "nu"), ShellKind::Cmd => write!(f, "cmd"), ShellKind::Rc => write!(f, "rc"), @@ -239,7 +264,8 @@ impl ShellKind { .to_string_lossy(); match &*program { - "powershell" | "pwsh" => ShellKind::PowerShell, + "powershell" => ShellKind::PowerShell, + "pwsh" => ShellKind::Pwsh, "cmd" => ShellKind::Cmd, "nu" => ShellKind::Nushell, "fish" => ShellKind::Fish, @@ -258,7 +284,7 @@ impl ShellKind { pub fn to_shell_variable(self, input: &str) -> String { match self { - Self::PowerShell => Self::to_powershell_variable(input), + Self::PowerShell | Self::Pwsh => Self::to_powershell_variable(input), Self::Cmd => Self::to_cmd_variable(input), Self::Posix => input.to_owned(), Self::Fish => input.to_owned(), @@ -386,8 +412,12 @@ impl ShellKind { pub fn args_for_shell(&self, interactive: bool, combined_command: String) -> Vec { match self { - ShellKind::PowerShell => vec!["-C".to_owned(), 
combined_command], - ShellKind::Cmd => vec!["/C".to_owned(), combined_command], + ShellKind::PowerShell | ShellKind::Pwsh => vec!["-C".to_owned(), combined_command], + ShellKind::Cmd => vec![ + "/S".to_owned(), + "/C".to_owned(), + format!("\"{combined_command}\""), + ], ShellKind::Posix | ShellKind::Nushell | ShellKind::Fish @@ -405,7 +435,7 @@ impl ShellKind { pub const fn command_prefix(&self) -> Option { match self { - ShellKind::PowerShell => Some('&'), + ShellKind::PowerShell | ShellKind::Pwsh => Some('&'), ShellKind::Nushell => Some('^'), ShellKind::Posix | ShellKind::Csh @@ -436,6 +466,7 @@ impl ShellKind { | ShellKind::Rc | ShellKind::Fish | ShellKind::PowerShell + | ShellKind::Pwsh | ShellKind::Nushell | ShellKind::Xonsh | ShellKind::Elvish => ';', @@ -450,6 +481,7 @@ impl ShellKind { | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish + | ShellKind::Pwsh | ShellKind::PowerShell | ShellKind::Xonsh => "&&", ShellKind::Nushell | ShellKind::Elvish => ";", @@ -457,11 +489,10 @@ impl ShellKind { } pub fn try_quote<'a>(&self, arg: &'a str) -> Option> { - shlex::try_quote(arg).ok().map(|arg| match self { - // If we are running in PowerShell, we want to take extra care when escaping strings. - // In particular, we want to escape strings with a backtick (`) rather than a backslash (\). - ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"").replace("\\\\", "\\")), - ShellKind::Cmd => Cow::Owned(arg.replace("\\\\", "\\")), + match self { + ShellKind::PowerShell => Some(Self::quote_powershell(arg)), + ShellKind::Pwsh => Some(Self::quote_pwsh(arg)), + ShellKind::Cmd => Some(Self::quote_cmd(arg)), ShellKind::Posix | ShellKind::Csh | ShellKind::Tcsh @@ -469,8 +500,173 @@ impl ShellKind { | ShellKind::Fish | ShellKind::Nushell | ShellKind::Xonsh - | ShellKind::Elvish => arg, - }) + | ShellKind::Elvish => shlex::try_quote(arg).ok(), + } + } + + fn quote_windows(arg: &str, enclose: bool) -> Cow<'_, str> { + if arg.is_empty() { + return Cow::Borrowed("\"\""); + } + + let needs_quoting = arg.chars().any(|c| c == ' ' || c == '\t' || c == '"'); + if !needs_quoting { + return Cow::Borrowed(arg); + } + + let mut result = String::with_capacity(arg.len() + 2); + + if enclose { + result.push('"'); + } + + let chars: Vec = arg.chars().collect(); + let mut i = 0; + + while i < chars.len() { + if chars[i] == '\\' { + let mut num_backslashes = 0; + while i < chars.len() && chars[i] == '\\' { + num_backslashes += 1; + i += 1; + } + + if i < chars.len() && chars[i] == '"' { + // Backslashes followed by quote: double the backslashes and escape the quote + for _ in 0..(num_backslashes * 2 + 1) { + result.push('\\'); + } + result.push('"'); + i += 1; + } else if i >= chars.len() { + // Trailing backslashes: double them (they precede the closing quote) + for _ in 0..(num_backslashes * 2) { + result.push('\\'); + } + } else { + // Backslashes not followed by quote: output as-is + for _ in 0..num_backslashes { + result.push('\\'); + } + } + } else if chars[i] == '"' { + // Quote not preceded by backslash: escape it + result.push('\\'); + result.push('"'); + i += 1; + } else { + result.push(chars[i]); + i += 1; + } + } + + if enclose { + result.push('"'); + } + Cow::Owned(result) + } + + fn needs_quoting_powershell(s: &str) -> bool { + s.is_empty() + || s.chars().any(|c| { + c.is_whitespace() + || matches!( + c, + '"' | '`' + | '$' + | '&' + | '|' + | '<' + | '>' + | ';' + | '(' + | ')' + | '[' + | ']' + | '{' + | '}' + | ',' + | '\'' + | '@' + ) + }) + } + + fn need_quotes_powershell(arg: &str) -> bool { + 
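+        // True when `arg` contains whitespace outside of any double-quoted span. Used as
+        // `ps_will_quote` in `quote_powershell`, where it disables the extra enclosing
+        // quotes that `quote_windows` would otherwise add.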
let mut quote_count = 0; + for c in arg.chars() { + if c == '"' { + quote_count += 1; + } else if c.is_whitespace() && (quote_count % 2 == 0) { + return true; + } + } + false + } + + fn escape_powershell_quotes(s: &str) -> String { + let mut result = String::with_capacity(s.len() + 4); + result.push('\''); + for c in s.chars() { + if c == '\'' { + result.push('\''); + } + result.push(c); + } + result.push('\''); + result + } + + pub fn quote_powershell(arg: &str) -> Cow<'_, str> { + let ps_will_quote = Self::need_quotes_powershell(arg); + let crt_quoted = Self::quote_windows(arg, !ps_will_quote); + + if !Self::needs_quoting_powershell(arg) { + return crt_quoted; + } + + Cow::Owned(Self::escape_powershell_quotes(&crt_quoted)) + } + + pub fn quote_pwsh(arg: &str) -> Cow<'_, str> { + if arg.is_empty() { + return Cow::Borrowed("''"); + } + + if !Self::needs_quoting_powershell(arg) { + return Cow::Borrowed(arg); + } + + Cow::Owned(Self::escape_powershell_quotes(arg)) + } + + pub fn quote_cmd(arg: &str) -> Cow<'_, str> { + let crt_quoted = Self::quote_windows(arg, true); + + let needs_cmd_escaping = crt_quoted.contains('"') + || crt_quoted.contains('%') + || crt_quoted + .chars() + .any(|c| matches!(c, '^' | '<' | '>' | '&' | '|' | '(' | ')')); + + if !needs_cmd_escaping { + return crt_quoted; + } + + let mut result = String::with_capacity(crt_quoted.len() * 2); + for c in crt_quoted.chars() { + match c { + '^' | '"' | '<' | '>' | '&' | '|' | '(' | ')' => { + result.push('^'); + result.push(c); + } + '%' => { + result.push_str("%%cd:~,%"); + } + _ => result.push(c), + } + } + Cow::Owned(result) } /// Quotes the given argument if necessary, taking into account the command prefix. @@ -506,7 +702,10 @@ impl ShellKind { .map(|quoted| Cow::Owned(self.prepend_command_prefix("ed).into_owned())); } } - self.try_quote(arg) + self.try_quote(arg).map(|quoted| match quoted { + unquoted @ Cow::Borrowed(_) => unquoted, + Cow::Owned(quoted) => Cow::Owned(self.prepend_command_prefix("ed).into_owned()), + }) } pub fn split(&self, input: &str) -> Option> { @@ -517,7 +716,7 @@ impl ShellKind { match self { ShellKind::Cmd => "", ShellKind::Nushell => "overlay use", - ShellKind::PowerShell => ".", + ShellKind::PowerShell | ShellKind::Pwsh => ".", ShellKind::Fish | ShellKind::Csh | ShellKind::Tcsh @@ -537,6 +736,7 @@ impl ShellKind { | ShellKind::Rc | ShellKind::Fish | ShellKind::PowerShell + | ShellKind::Pwsh | ShellKind::Nushell | ShellKind::Xonsh | ShellKind::Elvish => "clear", @@ -555,6 +755,7 @@ impl ShellKind { | ShellKind::Rc | ShellKind::Fish | ShellKind::PowerShell + | ShellKind::Pwsh | ShellKind::Nushell | ShellKind::Xonsh | ShellKind::Elvish => true, @@ -584,7 +785,7 @@ mod tests { .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"") .unwrap() .into_owned(), - "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest `\"test_foo.py::test_foo`\"\"".to_string() + "'C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"'".to_string() ); } @@ -596,7 +797,113 @@ mod tests { .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"") .unwrap() .into_owned(), - "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string() + "^\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest 
\\^\"test_foo.py::test_foo\\^\"^\"".to_string() + ); + } + + #[test] + fn test_try_quote_powershell_edge_cases() { + let shell_kind = ShellKind::PowerShell; + + // Empty string + assert_eq!( + shell_kind.try_quote("").unwrap().into_owned(), + "'\"\"'".to_string() + ); + + // String without special characters (no quoting needed) + assert_eq!(shell_kind.try_quote("simple").unwrap(), "simple"); + + // String with spaces + assert_eq!( + shell_kind.try_quote("hello world").unwrap().into_owned(), + "'hello world'".to_string() + ); + + // String with dollar signs + assert_eq!( + shell_kind.try_quote("$variable").unwrap().into_owned(), + "'$variable'".to_string() + ); + + // String with backticks + assert_eq!( + shell_kind.try_quote("test`command").unwrap().into_owned(), + "'test`command'".to_string() + ); + + // String with multiple special characters + assert_eq!( + shell_kind + .try_quote("test `\"$var`\" end") + .unwrap() + .into_owned(), + "'test `\\\"$var`\\\" end'".to_string() + ); + + // String with backslashes and colon (path without spaces doesn't need quoting) + assert_eq!( + shell_kind.try_quote("C:\\path\\to\\file").unwrap(), + "C:\\path\\to\\file" + ); + } + + #[test] + fn test_try_quote_cmd_edge_cases() { + let shell_kind = ShellKind::Cmd; + + // Empty string + assert_eq!( + shell_kind.try_quote("").unwrap().into_owned(), + "^\"^\"".to_string() + ); + + // String without special characters (no quoting needed) + assert_eq!(shell_kind.try_quote("simple").unwrap(), "simple"); + + // String with spaces + assert_eq!( + shell_kind.try_quote("hello world").unwrap().into_owned(), + "^\"hello world^\"".to_string() + ); + + // String with space and backslash (backslash not at end, so not doubled) + assert_eq!( + shell_kind.try_quote("path\\ test").unwrap().into_owned(), + "^\"path\\ test^\"".to_string() + ); + + // String ending with backslash (must be doubled before closing quote) + assert_eq!( + shell_kind.try_quote("test path\\").unwrap().into_owned(), + "^\"test path\\\\^\"".to_string() + ); + + // String ending with multiple backslashes (all doubled before closing quote) + assert_eq!( + shell_kind.try_quote("test path\\\\").unwrap().into_owned(), + "^\"test path\\\\\\\\^\"".to_string() + ); + + // String with embedded quote (quote is escaped, backslash before it is doubled) + assert_eq!( + shell_kind.try_quote("test\\\"quote").unwrap().into_owned(), + "^\"test\\\\\\^\"quote^\"".to_string() + ); + + // String with multiple backslashes before embedded quote (all doubled) + assert_eq!( + shell_kind + .try_quote("test\\\\\"quote") + .unwrap() + .into_owned(), + "^\"test\\\\\\\\\\^\"quote^\"".to_string() + ); + + // String with backslashes not before quotes (path without spaces doesn't need quoting) + assert_eq!( + shell_kind.try_quote("C:\\path\\to\\file").unwrap(), + "C:\\path\\to\\file" ); } @@ -612,7 +919,7 @@ mod tests { .try_quote_prefix_aware("'uname'") .unwrap() .into_owned(), - "\"'uname'\"".to_string() + "^\"'uname'\"".to_string() ); assert_eq!( shell_kind.try_quote("^uname").unwrap().into_owned(), @@ -645,7 +952,7 @@ mod tests { .try_quote_prefix_aware("'uname a'") .unwrap() .into_owned(), - "\"'uname a'\"".to_string() + "^\"'uname a'\"".to_string() ); assert_eq!( shell_kind.try_quote("^'uname a'").unwrap().into_owned(), diff --git a/crates/util/src/shell_builder.rs b/crates/util/src/shell_builder.rs index a4a0d21018447d229a6a95c4bf897804b5d6eaf9..436c07172368793e685d1ba4b1014ac38be13b73 100644 --- a/crates/util/src/shell_builder.rs +++ b/crates/util/src/shell_builder.rs @@ -1,3 
+1,5 @@ +use std::borrow::Cow; + use crate::shell::get_system_shell; use crate::shell::{Shell, ShellKind}; @@ -42,7 +44,7 @@ impl ShellBuilder { self.program.clone() } else { match self.kind { - ShellKind::PowerShell => { + ShellKind::PowerShell | ShellKind::Pwsh => { format!("{} -C '{}'", self.program, command_to_use_in_label) } ShellKind::Cmd => { @@ -76,6 +78,64 @@ impl ShellBuilder { mut self, task_command: Option, task_args: &[String], + ) -> (String, Vec) { + if let Some(task_command) = task_command { + let task_command = if !task_args.is_empty() { + match self.kind.try_quote_prefix_aware(&task_command) { + Some(task_command) => task_command.into_owned(), + None => task_command, + } + } else { + task_command + }; + let mut combined_command = task_args.iter().fold(task_command, |mut command, arg| { + command.push(' '); + let shell_variable = self.kind.to_shell_variable(arg); + command.push_str(&match self.kind.try_quote(&shell_variable) { + Some(shell_variable) => shell_variable, + None => Cow::Owned(shell_variable), + }); + command + }); + if self.redirect_stdin { + match self.kind { + ShellKind::Fish => { + combined_command.insert_str(0, "begin; "); + combined_command.push_str("; end { + combined_command.insert(0, '('); + combined_command.push_str(") { + combined_command.insert_str(0, "$null | & {"); + combined_command.push_str("}"); + } + ShellKind::Cmd => { + combined_command.push_str("< NUL"); + } + } + } + + self.args + .extend(self.kind.args_for_shell(self.interactive, combined_command)); + } + + (self.program, self.args) + } + + // This should not exist, but our task infra is broken beyond repair right now + #[doc(hidden)] + pub fn build_no_quote( + mut self, + task_command: Option, + task_args: &[String], ) -> (String, Vec) { if let Some(task_command) = task_command { let mut combined_command = task_args.iter().fold(task_command, |mut command, arg| { @@ -99,7 +159,7 @@ impl ShellBuilder { combined_command.insert(0, '('); combined_command.push_str(") { + ShellKind::PowerShell | ShellKind::Pwsh => { combined_command.insert_str(0, "$null | & {"); combined_command.push_str("}"); } @@ -115,6 +175,48 @@ impl ShellBuilder { (self.program, self.args) } + + /// Builds a command with the given task command and arguments. + /// + /// Prefer this over manually constructing a command with the output of `Self::build`, + /// as this method handles `cmd` weirdness on windows correctly. 
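+    ///
+    /// A hedged sketch of the intended call shape (assumes a `ShellBuilder` value named
+    /// `builder` constructed elsewhere; not a doctest):
+    ///
+    /// ```ignore
+    /// let command = builder.build_command(Some("echo".to_string()), &["hello world".to_string()]);
+    /// // `command` is a `smol::process::Command` with shell-appropriate quoting applied.
+    /// ```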
+ pub fn build_command( + self, + mut task_command: Option, + task_args: &[String], + ) -> smol::process::Command { + #[cfg(windows)] + let kind = self.kind; + if task_args.is_empty() { + task_command = task_command + .as_ref() + .map(|cmd| self.kind.try_quote_prefix_aware(&cmd).map(Cow::into_owned)) + .unwrap_or(task_command); + } + let (program, args) = self.build(task_command, task_args); + + let mut child = crate::command::new_smol_command(program); + + #[cfg(windows)] + if kind == ShellKind::Cmd { + use smol::process::windows::CommandExt; + + for arg in args { + child.raw_arg(arg); + } + } else { + child.args(args); + } + + #[cfg(not(windows))] + child.args(args); + + child + } + + pub fn kind(&self) -> ShellKind { + self.kind + } } #[cfg(test)] @@ -144,7 +246,7 @@ mod test { vec![ "-i", "-c", - "echo $env.hello $env.world nothing --($env.something) $ ${test" + "echo '$env.hello' '$env.world' nothing '--($env.something)' '$' '${test'" ] ); } @@ -174,4 +276,23 @@ mod test { assert_eq!(program, "fish"); assert_eq!(args, vec!["-i", "-c", "begin; echo test; end Result> { use std::os::unix::process::CommandExt; + use crate::command::new_std_command; + let shell_kind = ShellKind::new(shell_path, false); let zed_path = super::get_shell_safe_zed_path(shell_kind)?; let mut command_string = String::new(); - let mut command = std::process::Command::new(shell_path); + let mut command = new_std_command(shell_path); command.args(args); // In some shells, file descriptors greater than 2 cannot be used in interactive mode, // so file descriptor 0 (stdin) is used instead. This impacts zsh, old bash; perhaps others. @@ -53,6 +55,7 @@ async fn capture_unix( // xonsh doesn't support redirecting to stdin, and control sequences are printed to // stdout on startup ShellKind::Xonsh => (FD_STDERR, "o>e".to_string()), + ShellKind::PowerShell => (FD_STDIN, format!(">{}", FD_STDIN)), _ => (FD_STDIN, format!(">&{}", FD_STDIN)), // `>&0` }; @@ -129,7 +132,7 @@ async fn spawn_and_read_fd( #[cfg(windows)] async fn capture_windows( shell_path: &Path, - _args: &[String], + args: &[String], directory: &Path, ) -> Result> { use std::process::Stdio; @@ -138,17 +141,17 @@ async fn capture_windows( std::env::current_exe().context("Failed to determine current zed executable path.")?; let shell_kind = ShellKind::new(shell_path, true); - if let ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh = - shell_kind - { - return Err(anyhow::anyhow!("unsupported shell kind")); - } let mut cmd = crate::command::new_smol_command(shell_path); + cmd.args(args); let cmd = match shell_kind { - ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh => { - unreachable!() - } - ShellKind::Posix => cmd.args([ + ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::Xonsh + | ShellKind::Posix => cmd.args([ + "-l", + "-i", "-c", &format!( "cd '{}'; '{}' --printenv", @@ -156,7 +159,7 @@ async fn capture_windows( zed_path.display() ), ]), - ShellKind::PowerShell => cmd.args([ + ShellKind::PowerShell | ShellKind::Pwsh => cmd.args([ "-NonInteractive", "-NoProfile", "-Command", diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 211b972e69deb9edf5c045a8fc2d52f5b8115bb2..4ea35901963523180eb6df7534565fd77ebb2585 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -51,6 +51,12 @@ macro_rules! 
debug_panic { }; } +#[inline] +pub const fn is_utf8_char_boundary(u8: u8) -> bool { + // This is bit magic equivalent to: b < 128 || b >= 192 + (u8 as i8) >= -0x40 +} + pub fn truncate(s: &str, max_chars: usize) -> &str { match s.char_indices().nth(max_chars) { None => s, @@ -384,6 +390,8 @@ pub fn set_pre_exec_to_start_new_session( use std::os::unix::process::CommandExt; command.pre_exec(|| { libc::setsid(); + #[cfg(target_os = "macos")] + crate::command::reset_exception_ports(); Ok(()) }); }; diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 4cea29508f437d6753a78155965b94259a2d7884..2db1b51e72fcd862ccb1c35ff920fec7dbd47995 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -35,7 +35,6 @@ multi_buffer.workspace = true nvim-rs = { git = "https://github.com/KillTheMule/nvim-rs", rev = "764dd270c642f77f10f3e19d05cc178a6cbe69f3", features = ["use_tokio"], optional = true } picker.workspace = true project.workspace = true -project_panel.workspace = true regex.workspace = true schemars.workspace = true search.workspace = true @@ -56,7 +55,7 @@ zed_actions.workspace = true [dev-dependencies] assets.workspace = true -command_palette.workspace = true +command_palette = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } git_ui.workspace = true gpui = { workspace = true, features = ["test-support"] } @@ -64,9 +63,12 @@ indoc.workspace = true language = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } +markdown_preview.workspace = true parking_lot.workspace = true project_panel.workspace = true +outline_panel.workspace = true release_channel.workspace = true +semver.workspace = true settings_ui.workspace = true settings.workspace = true perf.workspace = true diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index cba8351e8d36e784c77c20b15ac0dead41f84a13..205097130d152fe255feb02a449956124586d8e6 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -189,6 +189,7 @@ pub struct VimSet { #[derive(Clone, PartialEq, Action)] #[action(namespace = vim, no_json, no_register)] struct VimSave { + pub range: Option, pub save_intent: Option, pub filename: String, } @@ -324,6 +325,136 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { }); Vim::action(editor, cx, |vim, action: &VimSave, window, cx| { + if let Some(range) = &action.range { + vim.update_editor(cx, |vim, editor, cx| { + let Some(range) = range.buffer_range(vim, editor, window, cx).ok() else { + return; + }; + let Some((line_ending, encoding, has_bom, text, whole_buffer)) = editor.buffer().update(cx, |multi, cx| { + Some(multi.as_singleton()?.update(cx, |buffer, _| { + ( + buffer.line_ending(), + buffer.encoding(), + buffer.has_bom(), + buffer.as_rope().slice_rows(range.start.0..range.end.0 + 1), + range.start.0 == 0 && range.end.0 + 1 >= buffer.row_count(), + ) + })) + }) else { + return; + }; + + let filename = action.filename.clone(); + let filename = if filename.is_empty() { + let Some(file) = editor + .buffer() + .read(cx) + .as_singleton() + .and_then(|buffer| buffer.read(cx).file()) + else { + let _ = window.prompt( + gpui::PromptLevel::Warning, + "No file name", + Some("Partial buffer write requires file name."), + &["Cancel"], + cx, + ); + return; + }; + file.path().display(file.path_style(cx)).to_string() + } else { + filename + }; + + if action.filename.is_empty() { + if whole_buffer { + if let 
Some(workspace) = vim.workspace(window) { + workspace.update(cx, |workspace, cx| { + workspace + .save_active_item( + action.save_intent.unwrap_or(SaveIntent::Save), + window, + cx, + ) + .detach_and_prompt_err("Failed to save", window, cx, |_, _, _| None); + }); + } + return; + } + if Some(SaveIntent::Overwrite) != action.save_intent { + let _ = window.prompt( + gpui::PromptLevel::Warning, + "Use ! to write partial buffer", + Some("Overwriting the current file with selected buffer content requires '!'."), + &["Cancel"], + cx, + ); + return; + } + editor.buffer().update(cx, |multi, cx| { + if let Some(buffer) = multi.as_singleton() { + buffer.update(cx, |buffer, _| buffer.set_conflict()); + } + }); + }; + + editor.project().unwrap().update(cx, |project, cx| { + let worktree = project.visible_worktrees(cx).next().unwrap(); + + worktree.update(cx, |worktree, cx| { + let path_style = worktree.path_style(); + let Some(path) = RelPath::new(Path::new(&filename), path_style).ok() else { + return; + }; + + let rx = (worktree.entry_for_path(&path).is_some() && Some(SaveIntent::Overwrite) != action.save_intent).then(|| { + window.prompt( + gpui::PromptLevel::Warning, + &format!("{path:?} already exists. Do you want to replace it?"), + Some( + "A file or folder with the same name already exists. Replacing it will overwrite its current contents.", + ), + &["Replace", "Cancel"], + cx + ) + }); + let filename = filename.clone(); + cx.spawn_in(window, async move |this, cx| { + if let Some(rx) = rx + && Ok(0) != rx.await + { + return; + } + + let _ = this.update_in(cx, |worktree, window, cx| { + let Some(path) = RelPath::new(Path::new(&filename), path_style).ok() else { + return; + }; + worktree + .write_file(path.into_arc(), text.clone(), line_ending, encoding, has_bom, cx) + .detach_and_prompt_err("Failed to write lines", window, cx, |_, _, _| None); + }); + }) + .detach(); + }); + }); + }); + return; + } + if action.filename.is_empty() { + if let Some(workspace) = vim.workspace(window) { + workspace.update(cx, |workspace, cx| { + workspace + .save_active_item( + action.save_intent.unwrap_or(SaveIntent::Save), + window, + cx, + ) + .detach_and_prompt_err("Failed to save", window, cx, |_, _, _| None); + }); + } + return; + } vim.update_editor(cx, |_, editor, cx| { let Some(project) = editor.project().cloned() else { return; @@ -836,7 +967,7 @@ impl VimCommand { } }; - let rel_path = if args.ends_with(PathStyle::local().separator()) { + let rel_path = if args.ends_with(PathStyle::local().primary_separator()) { rel_path } else { rel_path @@ -869,7 +1000,7 @@ impl VimCommand { .display(PathStyle::local()) .to_string(); if dir.is_dir { - path_string.push_str(PathStyle::local().separator()); + path_string.push_str(PathStyle::local().primary_separator()); } path_string }) @@ -1175,24 +1306,34 @@ fn generate_commands(_: &App) -> Vec { vec![ VimCommand::new( ("w", "rite"), - workspace::Save { + VimSave { save_intent: Some(SaveIntent::Save), + filename: "".into(), + range: None, }, ) - .bang(workspace::Save { + .bang(VimSave { save_intent: Some(SaveIntent::Overwrite), + filename: "".into(), + range: None, }) .filename(|action, filename| { Some( VimSave { save_intent: action .as_any() - .downcast_ref::() + .downcast_ref::() .and_then(|action| action.save_intent), filename, + range: None, } .boxed_clone(), ) + }) + .range(|action, range| { + let mut action: VimSave = action.as_any().downcast_ref::().unwrap().clone(); + action.range.replace(range.clone()); + Some(Box::new(action)) }), VimCommand::new(("e", "dit"), 
editor::actions::ReloadFile) + .bang(editor::actions::ReloadFile) @@ -1692,12 +1833,12 @@ pub fn command_interceptor( let mut positions: Vec<_> = positions.iter().map(|&pos| pos + offset).collect(); positions.splice(0..0, no_args_positions.clone()); let string = format!("{display_string} {string}"); - let action = match cx - .update(|cx| commands(cx).get(cmd_idx)?.parse(&string[1..], &range, cx)) - { - Ok(Some(action)) => action, - _ => continue, - }; + let (range, query) = VimCommand::parse_range(&string[1..]); + let action = + match cx.update(|cx| commands(cx).get(cmd_idx)?.parse(&query, &range, cx)) { + Ok(Some(action)) => action, + _ => continue, + }; results.push(CommandInterceptItem { action, string, @@ -2302,7 +2443,7 @@ impl ShellExec { #[cfg(test)] mod test { - use std::path::Path; + use std::path::{Path, PathBuf}; use crate::{ VimAddon, @@ -2314,7 +2455,7 @@ mod test { use indoc::indoc; use settings::Settings; use util::path; - use workspace::Workspace; + use workspace::{OpenOptions, Workspace}; #[gpui::test] async fn test_command_basics(cx: &mut TestAppContext) { @@ -2619,6 +2760,48 @@ mod test { }); } + #[gpui::test] + async fn test_command_write_range(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.workspace(|workspace, _, cx| { + assert_active_item(workspace, path!("/root/dir/file.rs"), "", cx); + }); + + cx.set_state( + indoc! {" + The quick + brown« fox + jumpsˇ» over + the lazy dog + "}, + Mode::Visual, + ); + + cx.simulate_keystrokes(": w space dir/other.rs"); + cx.simulate_keystrokes("enter"); + + let other = path!("/root/dir/other.rs"); + + let _ = cx + .workspace(|workspace, window, cx| { + workspace.open_abs_path(PathBuf::from(other), OpenOptions::default(), window, cx) + }) + .await; + + cx.workspace(|workspace, _, cx| { + assert_active_item( + workspace, + other, + indoc! {" + brown fox + jumps over + "}, + cx, + ); + }); + } + #[gpui::test] async fn test_command_matching_lines(cx: &mut TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index ee7c0a14fb721116c3fc1f2c3d1bf7b716b43f18..f902a8ff6e9f08475fb6ce8323a924730d3621d1 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -6,8 +6,8 @@ mod select; use editor::display_map::DisplaySnapshot; use editor::{ - DisplayPoint, Editor, EditorSettings, HideMouseCursorOrigin, SelectionEffects, ToOffset, - ToPoint, movement, + DisplayPoint, Editor, EditorSettings, HideMouseCursorOrigin, MultiBufferOffset, + SelectionEffects, ToOffset, ToPoint, movement, }; use gpui::actions; use gpui::{Context, Window}; @@ -15,8 +15,8 @@ use language::{CharClassifier, CharKind, Point}; use search::{BufferSearchBar, SearchOptions}; use settings::Settings; use text::{Bias, SelectionGoal}; -use workspace::searchable; use workspace::searchable::FilteredSearchRange; +use workspace::searchable::{self, Direction}; use crate::motion::{self, MotionKind}; use crate::state::SearchState; @@ -52,6 +52,10 @@ actions!( HelixSubstitute, /// Delete the selection and enter edit mode, without yanking the selection. HelixSubstituteNoYank, + /// Select the next search match. + HelixSelectNext, + /// Select the previous search match. 
+ HelixSelectPrevious, ] ); @@ -74,6 +78,8 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { }); Vim::action(editor, cx, Vim::helix_substitute); Vim::action(editor, cx, Vim::helix_substitute_no_yank); + Vim::action(editor, cx, Vim::helix_select_next); + Vim::action(editor, cx, Vim::helix_select_previous); } impl Vim { @@ -97,20 +103,82 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let text_layout_details = editor.text_layout_details(window); editor.change_selections(Default::default(), window, cx, |s| { + if let Motion::ZedSearchResult { new_selections, .. } = &motion { + s.select_anchor_ranges(new_selections.clone()); + return; + }; + s.move_with(|map, selection| { - let current_head = selection.head(); - - let Some((new_head, goal)) = motion.move_point( - map, - current_head, - selection.goal, - times, - &text_layout_details, - ) else { - return; + let was_reversed = selection.reversed; + let mut current_head = selection.head(); + + // our motions assume the current character is after the cursor, + // but in (forward) visual mode the current character is just + // before the end of the selection. + + // If the file ends with a newline (which is common) we don't do this. + // so that if you go to the end of such a file you can use "up" to go + // to the previous line and have it work somewhat as expected. + if !selection.reversed + && !selection.is_empty() + && !(selection.end.column() == 0 && selection.end == map.max_point()) + { + current_head = movement::left(map, selection.end) + } + + let (new_head, goal) = match motion { + // Going to next word start is special cased + // since Vim differs from Helix in that motion + // Vim: `w` goes to the first character of a word + // Helix: `w` goes to the character before a word + Motion::NextWordStart { ignore_punctuation } => { + let mut head = movement::right(map, current_head); + let classifier = + map.buffer_snapshot().char_classifier_at(head.to_point(map)); + for _ in 0..times.unwrap_or(1) { + let (_, new_head) = + movement::find_boundary_trail(map, head, |left, right| { + Self::is_boundary_right(ignore_punctuation)( + left, + right, + &classifier, + ) + }); + head = new_head; + } + head = movement::left(map, head); + (head, SelectionGoal::None) + } + _ => motion + .move_point( + map, + current_head, + selection.goal, + times, + &text_layout_details, + ) + .unwrap_or((current_head, selection.goal)), }; selection.set_head(new_head, goal); + + // ensure the current character is included in the selection. + if !selection.reversed { + let next_point = movement::right(map, selection.end); + + if !(next_point.column() == 0 && next_point == map.max_point()) { + selection.end = next_point; + } + } + + // vim always ensures the anchor character stays selected. + // if our selection has reversed, we need to move the opposite end + // to ensure the anchor is still selected. 
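+                        // In practice this grows the selection by one character at the end
+                        // that would otherwise drop the anchor character.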
+ if was_reversed && !selection.reversed { + selection.start = movement::left(map, selection.start); + } else if !was_reversed && selection.reversed { + selection.end = movement::right(map, selection.end); + } }) }); }); @@ -244,6 +312,30 @@ impl Vim { }); } + fn is_boundary_right( + ignore_punctuation: bool, + ) -> impl FnMut(char, char, &CharClassifier) -> bool { + move |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = (left == '\n') ^ (right == '\n'); + + (left_kind != right_kind && right_kind != CharKind::Whitespace) || at_newline + } + } + + fn is_boundary_left( + ignore_punctuation: bool, + ) -> impl FnMut(char, char, &CharClassifier) -> bool { + move |left, right, classifier| { + let left_kind = classifier.kind_with(left, ignore_punctuation); + let right_kind = classifier.kind_with(right, ignore_punctuation); + let at_newline = (left == '\n') ^ (right == '\n'); + + (left_kind != right_kind && left_kind != CharKind::Whitespace) || at_newline + } + } + pub fn helix_move_cursor( &mut self, motion: Motion, @@ -252,41 +344,54 @@ impl Vim { cx: &mut Context, ) { match motion { - Motion::NextWordStart { ignore_punctuation } => { - self.helix_find_range_forward(times, window, cx, |left, right, classifier| { - let left_kind = classifier.kind_with(left, ignore_punctuation); - let right_kind = classifier.kind_with(right, ignore_punctuation); - let at_newline = (left == '\n') ^ (right == '\n'); - - (left_kind != right_kind && right_kind != CharKind::Whitespace) || at_newline - }) - } - Motion::NextWordEnd { ignore_punctuation } => { - self.helix_find_range_forward(times, window, cx, |left, right, classifier| { - let left_kind = classifier.kind_with(left, ignore_punctuation); - let right_kind = classifier.kind_with(right, ignore_punctuation); - let at_newline = (left == '\n') ^ (right == '\n'); - - (left_kind != right_kind && left_kind != CharKind::Whitespace) || at_newline - }) - } - Motion::PreviousWordStart { ignore_punctuation } => { - self.helix_find_range_backward(times, window, cx, |left, right, classifier| { - let left_kind = classifier.kind_with(left, ignore_punctuation); - let right_kind = classifier.kind_with(right, ignore_punctuation); - let at_newline = (left == '\n') ^ (right == '\n'); + Motion::NextWordStart { ignore_punctuation } => self.helix_find_range_forward( + times, + window, + cx, + Self::is_boundary_right(ignore_punctuation), + ), + Motion::NextWordEnd { ignore_punctuation } => self.helix_find_range_forward( + times, + window, + cx, + Self::is_boundary_left(ignore_punctuation), + ), + Motion::PreviousWordStart { ignore_punctuation } => self.helix_find_range_backward( + times, + window, + cx, + Self::is_boundary_left(ignore_punctuation), + ), + Motion::PreviousWordEnd { ignore_punctuation } => self.helix_find_range_backward( + times, + window, + cx, + Self::is_boundary_right(ignore_punctuation), + ), + Motion::EndOfLine { .. } => { + // In Helix mode, EndOfLine should position cursor ON the last character, + // not after it. We therefore need special handling for it. 
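+                // (`test_g_l_end_of_line` below covers the expected behaviour, including
+                // multi-byte UTF-8 and empty lines.)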
+ self.update_editor(cx, |_, editor, cx| { + let text_layout_details = editor.text_layout_details(window); + editor.change_selections(Default::default(), window, cx, |s| { + s.move_with(|map, selection| { + let goal = selection.goal; + let cursor = if selection.is_empty() || selection.reversed { + selection.head() + } else { + movement::left(map, selection.head()) + }; - (left_kind != right_kind && left_kind != CharKind::Whitespace) || at_newline - }) - } - Motion::PreviousWordEnd { ignore_punctuation } => { - self.helix_find_range_backward(times, window, cx, |left, right, classifier| { - let left_kind = classifier.kind_with(left, ignore_punctuation); - let right_kind = classifier.kind_with(right, ignore_punctuation); - let at_newline = (left == '\n') ^ (right == '\n'); + let (point, _goal) = motion + .move_point(map, cursor, goal, times, &text_layout_details) + .unwrap_or((cursor, goal)); - (left_kind != right_kind && right_kind != CharKind::Whitespace) || at_newline - }) + // Move left by one character to position on the last character + let adjusted_point = movement::saturating_left(map, point); + selection.collapse_to(adjusted_point, SelectionGoal::None) + }) + }); + }); } Motion::FindForward { before, @@ -450,7 +555,7 @@ impl Vim { prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode: self.mode, - is_helix_regex_search: true, + helix_select: true, } }); } @@ -512,7 +617,7 @@ impl Vim { ..range.end.to_offset(&display_map, Bias::Left); if !byte_range.is_empty() { - let replacement_text = text.repeat(byte_range.len()); + let replacement_text = text.repeat(byte_range.end - byte_range.start); edits.push((byte_range, replacement_text)); } } @@ -609,7 +714,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let newest = editor .selections - .newest::(&editor.display_snapshot(cx)); + .newest::(&editor.display_snapshot(cx)); editor.change_selections(Default::default(), window, cx, |s| s.select(vec![newest])); }); } @@ -664,6 +769,68 @@ impl Vim { ) { self.do_helix_substitute(false, window, cx); } + + fn helix_select_next( + &mut self, + _: &HelixSelectNext, + window: &mut Window, + cx: &mut Context, + ) { + self.do_helix_select(Direction::Next, window, cx); + } + + fn helix_select_previous( + &mut self, + _: &HelixSelectPrevious, + window: &mut Window, + cx: &mut Context, + ) { + self.do_helix_select(Direction::Prev, window, cx); + } + + fn do_helix_select( + &mut self, + direction: searchable::Direction, + window: &mut Window, + cx: &mut Context, + ) { + let Some(pane) = self.pane(window, cx) else { + return; + }; + let count = Vim::take_count(cx).unwrap_or(1); + Vim::take_forced_motion(cx); + let prior_selections = self.editor_selections(window, cx); + + let success = pane.update(cx, |pane, cx| { + let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() else { + return false; + }; + search_bar.update(cx, |search_bar, cx| { + if !search_bar.has_active_match() || !search_bar.show(window, cx) { + return false; + } + search_bar.select_match(direction, count, window, cx); + true + }) + }); + + if !success { + return; + } + if self.mode == Mode::HelixSelect { + self.update_editor(cx, |_vim, editor, cx| { + let snapshot = editor.snapshot(window, cx); + editor.change_selections(SelectionEffects::default(), window, cx, |s| { + s.select_anchor_ranges( + prior_selections + .iter() + .cloned() + .chain(s.all_anchors(&snapshot).iter().map(|s| s.range())), + ); + }) + }); + } + } } #[cfg(test)] @@ -1222,11 +1389,12 @@ mod test { Mode::HelixNormal, ); 
cx.simulate_keystrokes("x"); + // Adjacent line selections stay separate (not merged) cx.assert_state( indoc! {" «line one line two - line three + ˇ»«line three line four ˇ»line five"}, Mode::HelixNormal, ); @@ -1296,6 +1464,30 @@ mod test { cx.assert_state("«one ˇ»two", Mode::HelixNormal); } + #[gpui::test] + async fn test_helix_select_motion(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + cx.set_state("«ˇ»one two three", Mode::HelixSelect); + cx.simulate_keystrokes("w"); + cx.assert_state("«one ˇ»two three", Mode::HelixSelect); + + cx.set_state("«ˇ»one two three", Mode::HelixSelect); + cx.simulate_keystrokes("e"); + cx.assert_state("«oneˇ» two three", Mode::HelixSelect); + } + + #[gpui::test] + async fn test_helix_full_cursor_selection(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + cx.set_state("ˇone two three", Mode::HelixNormal); + cx.simulate_keystrokes("l l v h h h"); + cx.assert_state("«ˇone» two three", Mode::HelixSelect); + } + #[gpui::test] async fn test_helix_select_regex(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1355,7 +1547,7 @@ mod test { cx.simulate_keystrokes("/ o n e"); cx.simulate_keystrokes("enter"); cx.simulate_keystrokes("n n"); - cx.assert_state("ˇhello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect); + cx.assert_state("hello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect); } #[gpui::test] @@ -1420,4 +1612,59 @@ mod test { Mode::Insert, ); } + + #[gpui::test] + async fn test_g_l_end_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Test g l moves to last character, not after it + cx.set_state("hello ˇworld!", Mode::HelixNormal); + cx.simulate_keystrokes("g l"); + cx.assert_state("hello worldˇ!", Mode::HelixNormal); + + // Test with Chinese characters to check that multi-byte UTF-8 works + cx.set_state("ˇ你好世界", Mode::HelixNormal); + cx.simulate_keystrokes("g l"); + cx.assert_state("你好世ˇ界", Mode::HelixNormal); + + // Test with end of line + cx.set_state("endˇ", Mode::HelixNormal); + cx.simulate_keystrokes("g l"); + cx.assert_state("enˇd", Mode::HelixNormal); + + // Test with empty line + cx.set_state( + indoc! {" + hello + ˇ + world"}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("g l"); + cx.assert_state( + indoc! {" + hello + ˇ + world"}, + Mode::HelixNormal, + ); + + // Test with multiple lines + cx.set_state( + indoc! {" + ˇfirst line + second line + third line"}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("g l"); + cx.assert_state( + indoc!
{" + first linˇe + second line + third line"}, + Mode::HelixNormal, + ); + } } diff --git a/crates/vim/src/helix/boundary.rs b/crates/vim/src/helix/boundary.rs index a6de926bc5a10415dad6584f3d07476b2bf0e5d7..0c2ebbeef00a306a388756455bfb6ffcd40395e8 100644 --- a/crates/vim/src/helix/boundary.rs +++ b/crates/vim/src/helix/boundary.rs @@ -1,10 +1,7 @@ -use std::{ - cmp::Ordering, - ops::{Deref, DerefMut, Range}, -}; +use std::{cmp::Ordering, ops::Range}; use editor::{ - DisplayPoint, + DisplayPoint, MultiBufferOffset, display_map::{DisplaySnapshot, ToDisplayPoint}, movement, }; @@ -104,8 +101,8 @@ trait BoundedObject { let next_end = self.next_end(map, end_search_start, outer)?; let maybe_next_start = self.next_start(map, start_search_start, outer); if let Some(next_start) = maybe_next_start - && (*next_start < *next_end - || *next_start == *next_end && self.can_be_zero_width(outer)) + && (next_start.0 < next_end.0 + || next_start.0 == next_end.0 && self.can_be_zero_width(outer)) && !self.ambiguous_outer() { let closing = self.close_at_end(next_start, map, outer)?; @@ -133,8 +130,8 @@ trait BoundedObject { let previous_start = self.previous_start(map, start_search_end, outer)?; let maybe_previous_end = self.previous_end(map, end_search_end, outer); if let Some(previous_end) = maybe_previous_end - && (*previous_end > *previous_start - || *previous_end == *previous_start && self.can_be_zero_width(outer)) + && (previous_end.0 > previous_start.0 + || previous_end.0 == previous_start.0 && self.can_be_zero_width(outer)) && !self.ambiguous_outer() { let closing = self.close_at_start(previous_end, map, outer)?; @@ -151,30 +148,22 @@ trait BoundedObject { } } -#[derive(Clone, Copy, PartialEq, Debug)] -struct Offset(usize); -impl Deref for Offset { - type Target = usize; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Offset { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} +#[derive(Clone, Copy, PartialEq, Debug, PartialOrd, Ord, Eq)] +struct Offset(MultiBufferOffset); impl Offset { fn next(self, map: &DisplaySnapshot) -> Option { - let next = Self(map.buffer_snapshot().clip_offset(*self + 1, Bias::Right)); - (*next > *self).then(|| next) + let next = Self( + map.buffer_snapshot() + .clip_offset(self.0 + 1usize, Bias::Right), + ); + (next.0 > self.0).then(|| next) } fn previous(self, map: &DisplaySnapshot) -> Option { - if *self == 0 { + if self.0 == MultiBufferOffset(0) { return None; } Some(Self( - map.buffer_snapshot().clip_offset(*self - 1, Bias::Left), + map.buffer_snapshot().clip_offset(self.0 - 1, Bias::Left), )) } fn range( @@ -211,7 +200,7 @@ impl HelixTextObject for B { let max_end = self.close_at_end(search_start, map, find_outer)?; let min_start = self.close_at_start(max_end, map, find_outer)?; - (*min_start <= *relative_to.start).then(|| min_start..max_end) + (min_start <= relative_to.start).then(|| min_start..max_end) }) } @@ -279,8 +268,8 @@ fn relative_range( min_start..max_end }; - let start = wanted_range.start.clone().to_display_point(map); - let end = wanted_range.end.clone().to_display_point(map); + let start = wanted_range.start.0.to_display_point(map); + let end = wanted_range.end.0.to_display_point(map); Some(start..end) } @@ -390,7 +379,7 @@ impl ImmediateBoundary { impl BoundedObject for ImmediateBoundary { fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option { try_find_boundary(map, from, |left, right| { - let classifier = map.buffer_snapshot().char_classifier_at(*from); + let classifier = 
map.buffer_snapshot().char_classifier_at(from.0); if outer { self.is_outer_start(left, right, classifier) } else { @@ -400,7 +389,7 @@ impl BoundedObject for ImmediateBoundary { } fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option { try_find_boundary(map, from, |left, right| { - let classifier = map.buffer_snapshot().char_classifier_at(*from); + let classifier = map.buffer_snapshot().char_classifier_at(from.0); if outer { self.is_outer_end(left, right, classifier) } else { @@ -410,7 +399,7 @@ impl BoundedObject for ImmediateBoundary { } fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option { try_find_preceding_boundary(map, from, |left, right| { - let classifier = map.buffer_snapshot().char_classifier_at(*from); + let classifier = map.buffer_snapshot().char_classifier_at(from.0); if outer { self.is_outer_start(left, right, classifier) } else { @@ -420,7 +409,7 @@ impl BoundedObject for ImmediateBoundary { } fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option { try_find_preceding_boundary(map, from, |left, right| { - let classifier = map.buffer_snapshot().char_classifier_at(*from); + let classifier = map.buffer_snapshot().char_classifier_at(from.0); if outer { self.is_outer_end(left, right, classifier) } else { @@ -572,7 +561,7 @@ impl FuzzyBoundary { boundary_kind: Boundary, ) -> Option { let generate_boundary_data = |left, right, point: Offset| { - let classifier = map.buffer_snapshot().char_classifier_at(*from); + let classifier = map.buffer_snapshot().char_classifier_at(from.0); let reach_boundary = if outer && boundary_kind == Boundary::Start { self.is_near_potential_outer_start(left, right, &classifier) } else if !outer && boundary_kind == Boundary::Start { @@ -598,9 +587,9 @@ impl FuzzyBoundary { Ordering::Greater => !backward, }); if backward { - boundaries.max_by_key(|boundary| **boundary) + boundaries.max_by_key(|boundary| *boundary) } else { - boundaries.min_by_key(|boundary| **boundary) + boundaries.min_by_key(|boundary| *boundary) } } } @@ -662,15 +651,15 @@ fn try_find_boundary_data( ) -> Option { let mut prev_ch = map .buffer_snapshot() - .reversed_chars_at(*from) + .reversed_chars_at(from.0) .next() .unwrap_or('\0'); - for ch in map.buffer_snapshot().chars_at(*from).chain(['\0']) { + for ch in map.buffer_snapshot().chars_at(from.0).chain(['\0']) { if let Some(boundary_information) = boundary_information(prev_ch, ch, from) { return Some(boundary_information); } - *from += ch.len_utf8(); + from.0 += ch.len_utf8(); prev_ch = ch; } @@ -702,13 +691,21 @@ fn try_find_preceding_boundary_data( mut from: Offset, is_boundary: impl Fn(char, char, Offset) -> Option, ) -> Option { - let mut prev_ch = map.buffer_snapshot().chars_at(*from).next().unwrap_or('\0'); + let mut prev_ch = map + .buffer_snapshot() + .chars_at(from.0) + .next() + .unwrap_or('\0'); - for ch in map.buffer_snapshot().reversed_chars_at(*from).chain(['\0']) { + for ch in map + .buffer_snapshot() + .reversed_chars_at(from.0) + .chain(['\0']) + { if let Some(boundary_information) = is_boundary(ch, prev_ch, from) { return Some(boundary_information); } - from.0 = from.0.saturating_sub(ch.len_utf8()); + from.0.0 = from.0.0.saturating_sub(ch.len_utf8()); prev_ch = ch; } diff --git a/crates/vim/src/helix/duplicate.rs b/crates/vim/src/helix/duplicate.rs index 1b1f10b00b6a7381f22c6ec3be674dc2c085eff6..37796c57aa0b9e27f2d7d786c9b8870e49d5871e 100644 --- a/crates/vim/src/helix/duplicate.rs +++ b/crates/vim/src/helix/duplicate.rs @@ -1,6 +1,6 
@@ use std::ops::Range; -use editor::{DisplayPoint, display_map::DisplaySnapshot}; +use editor::{DisplayPoint, MultiBufferOffset, display_map::DisplaySnapshot}; use gpui::Context; use text::Bias; use ui::Window; @@ -111,7 +111,7 @@ fn find_next_valid_duplicate_space( fn display_point_range_to_offset_range( range: &Range, map: &DisplaySnapshot, -) -> Range { +) -> Range { range.start.to_offset(map, Bias::Left)..range.end.to_offset(map, Bias::Right) } diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs index 67af7650011b0220f4ad05cebb6badf5d0ba7aa7..d91b138853abb07dc10957a4ee1f5af158066e06 100644 --- a/crates/vim/src/helix/paste.rs +++ b/crates/vim/src/helix/paste.rs @@ -125,7 +125,7 @@ impl Vim { s.select_ranges(new_selections.into_iter().map(|(anchor, len)| { let offset = anchor.to_offset(&snapshot); if action.before { - offset.saturating_sub(len)..offset + offset.saturating_sub_usize(len)..offset } else { offset..(offset + len) } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 2da1083ee6623cc8a463ef31be7e90dca0063b34..f2e629faf2dd4a5d1ff47a49278cdd022f75d8d4 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1,5 +1,5 @@ use editor::{ - Anchor, Bias, DisplayPoint, Editor, RowExt, ToOffset, ToPoint, + Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset, display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint}, movement::{ self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point, @@ -672,40 +672,30 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { impl Vim { pub(crate) fn search_motion(&mut self, m: Motion, window: &mut Window, cx: &mut Context) { - let Motion::ZedSearchResult { - prior_selections, - new_selections, + if let Motion::ZedSearchResult { + prior_selections, .. 
} = &m - else { - return; - }; - - match self.mode { - Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { - if !prior_selections.is_empty() { - self.update_editor(cx, |_, editor, cx| { - editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(prior_selections.iter().cloned()); + { + match self.mode { + Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { + if !prior_selections.is_empty() { + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(prior_selections.iter().cloned()) + }) }); - }); + } } - self.motion(m, window, cx); - } - Mode::Normal | Mode::Replace | Mode::Insert => { - if self.active_operator().is_some() { - self.motion(m, window, cx); + Mode::Normal | Mode::Replace | Mode::Insert => { + if self.active_operator().is_none() { + return; + } } - } - - Mode::HelixNormal => {} - Mode::HelixSelect => { - self.update_editor(cx, |_, editor, cx| { - editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(prior_selections.iter().chain(new_selections).cloned()); - }); - }); + Mode::HelixNormal | Mode::HelixSelect => {} } } + + self.motion(m, window, cx) } pub(crate) fn motion(&mut self, motion: Motion, window: &mut Window, cx: &mut Context) { @@ -2153,7 +2143,7 @@ pub(crate) fn sentence_backwards( if start_of_next_sentence < start { times = times.saturating_sub(1); } - if times == 0 || offset == 0 { + if times == 0 || offset.0 == 0 { return map.clip_point( start_of_next_sentence .to_offset(&map.buffer_snapshot()) @@ -2217,7 +2207,7 @@ pub(crate) fn sentence_forwards( map.max_point() } -fn next_non_blank(map: &DisplaySnapshot, start: usize) -> usize { +fn next_non_blank(map: &DisplaySnapshot, start: MultiBufferOffset) -> MultiBufferOffset { for (c, o) in map.buffer_chars_at(start) { if c == '\n' || !c.is_whitespace() { return o; @@ -2229,7 +2219,10 @@ fn next_non_blank(map: &DisplaySnapshot, start: usize) -> usize { // given the offset after a ., !, or ? find the start of the next sentence. // if this is not a sentence boundary, returns None. 
-fn start_of_next_sentence(map: &DisplaySnapshot, end_of_sentence: usize) -> Option { +fn start_of_next_sentence( + map: &DisplaySnapshot, + end_of_sentence: MultiBufferOffset, +) -> Option { let chars = map.buffer_chars_at(end_of_sentence); let mut seen_space = false; @@ -2263,35 +2256,25 @@ fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) - .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), ); let buffer_range = excerpt.buffer_range(); - if offset >= buffer_range.start && offset <= buffer_range.end { + if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 { let point = map .buffer_snapshot() - .offset_to_point(excerpt.map_offset_from_buffer(offset)); + .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset))); return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left); } - let mut last_position = None; for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() { let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer) ..language::ToOffset::to_offset(&range.context.end, buffer); if offset >= excerpt_range.start && offset <= excerpt_range.end { let text_anchor = buffer.anchor_after(offset); - let anchor = Anchor::in_buffer(excerpt, buffer.remote_id(), text_anchor); + let anchor = Anchor::in_buffer(excerpt, text_anchor); return anchor.to_display_point(map); } else if offset <= excerpt_range.start { - let anchor = Anchor::in_buffer(excerpt, buffer.remote_id(), range.context.start); + let anchor = Anchor::in_buffer(excerpt, range.context.start); return anchor.to_display_point(map); - } else { - last_position = Some(Anchor::in_buffer( - excerpt, - buffer.remote_id(), - range.context.end, - )); } } - let mut last_point = last_position.unwrap().to_point(&map.buffer_snapshot()); - last_point.column = point.column; - map.clip_point( map.point_to_display_point( map.buffer_snapshot().clip_point(point, Bias::Left), @@ -2370,6 +2353,9 @@ fn matching_tag(map: &DisplaySnapshot, head: DisplayPoint) -> Option DisplayPoint { + if !map.is_singleton() { + return display_point; + } // https://github.com/vim/vim/blob/1d87e11a1ef201b26ed87585fba70182ad0c468a/runtime/doc/motion.txt#L1200 let display_point = map.clip_at_line_end(display_point); let point = display_point.to_point(map); @@ -2385,9 +2371,10 @@ fn matching(map: &DisplaySnapshot, display_point: DisplayPoint) -> DisplayPoint // Attempt to find the smallest enclosing bracket range that also contains // the offset, which only happens if the cursor is currently in a bracket. 
let range_filter = |_buffer: &language::BufferSnapshot, - opening_range: Range, - closing_range: Range| { - opening_range.contains(&offset) || closing_range.contains(&offset) + opening_range: Range, + closing_range: Range| { + opening_range.contains(&BufferOffset(offset.0)) + || closing_range.contains(&BufferOffset(offset.0)) }; let bracket_ranges = snapshot @@ -2395,10 +2382,16 @@ fn matching(map: &DisplaySnapshot, display_point: DisplayPoint) -> DisplayPoint .or_else(|| snapshot.innermost_enclosing_bracket_ranges(offset..offset, None)); if let Some((opening_range, closing_range)) = bracket_ranges { - if opening_range.contains(&offset) { - return closing_range.start.to_display_point(map); - } else if closing_range.contains(&offset) { - return opening_range.start.to_display_point(map); + let mut chars = map.buffer_snapshot().chars_at(offset); + match chars.next() { + Some('/') => {} + _ => { + if opening_range.contains(&offset) { + return closing_range.start.to_display_point(map); + } else if closing_range.contains(&offset) { + return opening_range.start.to_display_point(map); + } + } } } @@ -2850,7 +2843,7 @@ fn method_motion( for _ in 0..times { let point = map.display_point_to_point(display_point, Bias::Left); - let offset = point.to_offset(&map.buffer_snapshot()); + let offset = point.to_offset(&map.buffer_snapshot()).0; let range = if direction == Direction::Prev { 0..offset } else { @@ -2879,7 +2872,7 @@ fn method_motion( } else { possibilities.min().unwrap_or(offset) }; - let new_point = map.clip_point(dest.to_display_point(map), Bias::Left); + let new_point = map.clip_point(MultiBufferOffset(dest).to_display_point(map), Bias::Left); if new_point == display_point { break; } @@ -2900,7 +2893,7 @@ fn comment_motion( for _ in 0..times { let point = map.display_point_to_point(display_point, Bias::Left); - let offset = point.to_offset(&map.buffer_snapshot()); + let offset = point.to_offset(&map.buffer_snapshot()).0; let range = if direction == Direction::Prev { 0..offset } else { @@ -2933,7 +2926,7 @@ fn comment_motion( } else { possibilities.min().unwrap_or(offset) }; - let new_point = map.clip_point(dest.to_display_point(map), Bias::Left); + let new_point = map.clip_point(MultiBufferOffset(dest).to_display_point(map), Bias::Left); if new_point == display_point { break; } @@ -2956,7 +2949,7 @@ fn section_motion( .display_point_to_point(display_point, Bias::Left) .to_offset(&map.buffer_snapshot()); let range = if direction == Direction::Prev { - 0..offset + MultiBufferOffset(0)..offset } else { offset..map.buffer_snapshot().len() }; @@ -2987,7 +2980,7 @@ fn section_motion( let relevant = if is_start { range.start } else { range.end }; if direction == Direction::Prev && relevant < offset { Some(relevant) - } else if direction == Direction::Next && relevant > offset + 1 { + } else if direction == Direction::Next && relevant > offset + 1usize { Some(relevant) } else { None @@ -2995,7 +2988,7 @@ fn section_motion( }); let offset = if direction == Direction::Prev { - possibilities.max().unwrap_or(0) + possibilities.max().unwrap_or(MultiBufferOffset(0)) } else { possibilities.min().unwrap_or(map.buffer_snapshot().len()) }; @@ -3312,6 +3305,96 @@ mod test { cx.shared_state().await.assert_eq("ˇ(\n {()} \n)"); } + #[gpui::test] + async fn test_unmatched_forward_markdown(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new_markdown_with_rust(cx).await; + + cx.neovim.exec("set filetype=markdown").await; + + cx.set_shared_state(indoc! 
{r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + ˇ } + } + ``` + "}) + .await; + cx.simulate_shared_keystrokes("] }").await; + cx.shared_state().await.assert_eq(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + ˇ} + } + ``` + "}); + + cx.set_shared_state(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } ˇ + } + ``` + "}) + .await; + cx.simulate_shared_keystrokes("] }").await; + cx.shared_state().await.assert_eq(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } • + ˇ} + ``` + "}); + } + + #[gpui::test] + async fn test_unmatched_backward_markdown(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new_markdown_with_rust(cx).await; + + cx.neovim.exec("set filetype=markdown").await; + + cx.set_shared_state(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + ˇ } + } + ``` + "}) + .await; + cx.simulate_shared_keystrokes("[ {").await; + cx.shared_state().await.assert_eq(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> ˇ{ + } + } + ``` + "}); + + cx.set_shared_state(indoc! {r" + ```rs + impl Worktree { + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } ˇ + } + ``` + "}) + .await; + cx.simulate_shared_keystrokes("[ {").await; + cx.shared_state().await.assert_eq(indoc! {r" + ```rs + impl Worktree ˇ{ + pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> { + } • + } + ``` + "}); + } + #[gpui::test] async fn test_matching_tags(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new_html(cx).await; @@ -3360,6 +3443,23 @@ mod test { test = "test" /> "#}); + + // test nested closing tag + cx.set_shared_state(indoc! {r#" + + + "#}) + .await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r#" + + <ˇ/body> + "#}); + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r#" + <ˇbody> + + "#}); } #[gpui::test] diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 8b4aefcaac371383dd3114c2b12abd166ef9aa72..aee0b424f04d49cc634048bb64f95805beef8455 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -3,7 +3,7 @@ mod convert; mod delete; mod increment; pub(crate) mod mark; -mod paste; +pub(crate) mod paste; pub(crate) mod repeat; mod scroll; pub(crate) mod search; @@ -578,8 +578,21 @@ impl Vim { window: &mut Window, cx: &mut Context, ) { - self.update_editor(cx, |_, editor, cx| { + self.update_editor(cx, |vim, editor, cx| { let text_layout_details = editor.text_layout_details(window); + + // If vim is in temporary mode and the motion being used is + // `EndOfLine` ($), we'll want to disable clipping at line ends so + // that the newline character can be selected so that, when moving + // back to visual mode, the cursor will be placed after the last + // character and not before it. + let clip_at_line_ends = editor.clip_at_line_ends(cx); + let should_disable_clip = matches!(motion, Motion::EndOfLine { .. 
}) && vim.temp_mode; + + if should_disable_clip { + editor.set_clip_at_line_ends(false, cx) + }; + editor.change_selections( SelectionEffects::default().nav_history(motion.push_to_jump_list()), window, @@ -591,7 +604,11 @@ impl Vim { .unwrap_or((cursor, goal)) }) }, - ) + ); + + if should_disable_clip { + editor.set_clip_at_line_ends(clip_at_line_ends, cx); + }; }); } @@ -671,13 +688,13 @@ impl Vim { self.start_recording(cx); self.switch_mode(Mode::Insert, false, window, cx); self.update_editor(cx, |vim, editor, cx| { - let Some(Mark::Local(marks)) = vim.get_mark("^", editor, window, cx) else { - return; - }; - - editor.change_selections(Default::default(), window, cx, |s| { - s.select_anchor_ranges(marks.iter().map(|mark| *mark..*mark)) - }); + if let Some(Mark::Local(marks)) = vim.get_mark("^", editor, window, cx) + && !marks.is_empty() + { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_anchor_ranges(marks.iter().map(|mark| *mark..*mark)) + }); + } }); } @@ -2269,4 +2286,35 @@ mod test { assert_eq!(workspace.active_pane().read(cx).active_item_index(), 1); }); } + + #[gpui::test] + async fn test_temporary_mode(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + // Test jumping to the end of the line ($). + cx.set_shared_state(indoc! {"lorem ˇipsum"}).await; + cx.simulate_shared_keystrokes("i").await; + cx.shared_state().await.assert_matches(); + cx.simulate_shared_keystrokes("ctrl-o $").await; + cx.shared_state().await.assert_eq(indoc! {"lorem ipsumˇ"}); + + // Test jumping to the next word. + cx.set_shared_state(indoc! {"loremˇ ipsum dolor"}).await; + cx.simulate_shared_keystrokes("a").await; + cx.shared_state().await.assert_matches(); + cx.simulate_shared_keystrokes("a n d space ctrl-o w").await; + cx.shared_state() + .await + .assert_eq(indoc! {"lorem and ipsum ˇdolor"}); + + // Test yanking to end of line ($). + cx.set_shared_state(indoc! {"lorem ˇipsum dolor"}).await; + cx.simulate_shared_keystrokes("i").await; + cx.shared_state().await.assert_matches(); + cx.simulate_shared_keystrokes("a n d space ctrl-o y $") + .await; + cx.shared_state() + .await + .assert_eq(indoc! 
{"lorem and ˇipsum dolor"}); + } } diff --git a/crates/vim/src/normal/change.rs b/crates/vim/src/normal/change.rs index 4735c64792f3639b2c0d6581e6179484e842f386..b0b0bddae19b27fa382d4c84c3fdd4df8ba83a43 100644 --- a/crates/vim/src/normal/change.rs +++ b/crates/vim/src/normal/change.rs @@ -121,7 +121,11 @@ impl Vim { }); }); if objects_found { - vim.copy_selections_content(editor, MotionKind::Exclusive, window, cx); + let kind = match object.target_visual_mode(vim.mode, around) { + Mode::VisualLine => MotionKind::Linewise, + _ => MotionKind::Exclusive, + }; + vim.copy_selections_content(editor, kind, window, cx); editor.insert("", window, cx); editor.refresh_edit_prediction(true, false, window, cx); } diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 888d9ff25b63fad2e7fc0cf6cf534bfb1a7aaf76..d9ef32deba5a3beb530d9ee42e2a6254df8c253b 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -210,10 +210,15 @@ fn find_target( .map_or(false, |ch| ch.is_ascii_hexdigit()); let mut pre_char = String::new(); + let next_offset = offset + + snapshot + .chars_at(start_offset) + .next() + .map_or(0, |ch| ch.len_utf8()); // Backward scan to find the start of the number, but stop at start_offset - for ch in snapshot.reversed_chars_at(offset + 1) { + for ch in snapshot.reversed_chars_at(next_offset) { // Search boundaries - if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { + if offset.0 == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { break; } @@ -818,6 +823,14 @@ mod test { cx.set_state("trueˇ 1 2 3", Mode::Normal); cx.simulate_keystrokes("ctrl-a"); cx.assert_state("true ˇ2 2 3", Mode::Normal); + + cx.set_state("falseˇ", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("truˇe", Mode::Normal); + + cx.set_state("⚡️ˇ⚡️", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("⚡️ˇ⚡️", Mode::Normal); } #[gpui::test] diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index 3bb040511fdd7fa53dd97198ae02b492b0e7359d..a4d85e87b24fa6e2753f0dbcfcbb43be9488f41a 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -372,9 +372,12 @@ pub fn jump_motion( #[cfg(test)] mod test { + use crate::test::{NeovimBackedTestContext, VimTestContext}; + use editor::Editor; use gpui::TestAppContext; - - use crate::test::NeovimBackedTestContext; + use std::path::Path; + use util::path; + use workspace::{CloseActiveItem, OpenOptions}; #[gpui::test] async fn test_quote_mark(cx: &mut TestAppContext) { @@ -394,4 +397,69 @@ mod test { cx.simulate_shared_keystrokes("^ ` `").await; cx.shared_state().await.assert_eq("Hello, worldˇ!"); } + + #[gpui::test] + async fn test_global_mark_overwrite(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + let path = Path::new(path!("/first.rs")); + let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone()); + fs.as_fake().insert_file(path, "one".into()).await; + let path = Path::new(path!("/second.rs")); + fs.as_fake().insert_file(path, "two".into()).await; + + let _ = cx + .workspace(|workspace, window, cx| { + workspace.open_abs_path( + path!("/first.rs").into(), + OpenOptions::default(), + window, + cx, + ) + }) + .await; + + cx.simulate_keystrokes("m A"); + + let _ = cx + .workspace(|workspace, window, cx| { + workspace.open_abs_path( + path!("/second.rs").into(), + OpenOptions::default(), + window, + cx, + ) + }) + .await; + + 
cx.simulate_keystrokes("m A"); + + let _ = cx + .workspace(|workspace, window, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.close_active_item(&CloseActiveItem::default(), window, cx) + }) + }) + .await; + + cx.simulate_keystrokes("m B"); + + cx.simulate_keystrokes("' A"); + + cx.workspace(|workspace, _, cx| { + let active_editor = workspace.active_item_as::(cx).unwrap(); + + let buffer = active_editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .unwrap(); + + let file = buffer.read(cx).file().unwrap(); + let file_path = file.as_local().unwrap().abs_path(cx); + + assert_eq!(file_path.to_str().unwrap(), path!("/second.rs")); + }) + } } diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 74a28322d13b6ab0f563e6953f6b1edbfea66740..82af828deb85e6e0ef36ea2853a251547051feed 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -1,4 +1,7 @@ -use editor::{DisplayPoint, RowExt, SelectionEffects, display_map::ToDisplayPoint, movement}; +use editor::{ + DisplayPoint, MultiBufferOffset, RowExt, SelectionEffects, display_map::ToDisplayPoint, + movement, +}; use gpui::{Action, Context, Window}; use language::{Bias, SelectionGoal}; use schemars::JsonSchema; @@ -15,7 +18,7 @@ use crate::{ }; /// Pastes text from the specified register at the cursor position. -#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)] +#[derive(Clone, Default, Deserialize, JsonSchema, PartialEq, Action)] #[action(namespace = vim)] #[serde(deny_unknown_fields)] pub struct Paste { @@ -174,7 +177,10 @@ impl Vim { original_indent_columns.push(original_indent_column); } - let cursor_offset = editor.selections.last::(&display_map).head(); + let cursor_offset = editor + .selections + .last::(&display_map) + .head(); if editor .buffer() .read(cx) @@ -711,7 +717,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings(cx, |settings| { settings.project.all_languages.languages.0.insert( - LanguageName::new("Rust").0, + LanguageName::new_static("Rust").0, LanguageSettingsContent { auto_indent_on_paste: Some(false), ..Default::default() @@ -767,6 +773,52 @@ mod test { "}); } + #[gpui::test] + async fn test_paste_system_clipboard_never(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |s| { + s.vim.get_or_insert_default().use_system_clipboard = Some(UseSystemClipboard::Never) + }); + }); + + cx.set_state( + indoc! {" + ˇThe quick brown + fox jumps over + the lazy dog"}, + Mode::Normal, + ); + + cx.write_to_clipboard(ClipboardItem::new_string("something else".to_string())); + + cx.simulate_keystrokes("d d"); + cx.assert_state( + indoc! {" + ˇfox jumps over + the lazy dog"}, + Mode::Normal, + ); + + cx.simulate_keystrokes("shift-v p"); + cx.assert_state( + indoc! {" + ˇThe quick brown + the lazy dog"}, + Mode::Normal, + ); + + cx.simulate_keystrokes("shift-v"); + cx.dispatch_action(editor::actions::Paste); + cx.assert_state( + indoc! 
{" + ˇsomething else + the lazy dog"}, + Mode::Normal, + ); + } + #[gpui::test] async fn test_numbered_registers(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/normal/repeat.rs b/crates/vim/src/normal/repeat.rs index 2d7927480869f7a14cff7e2051ec421268df1d97..e47b2b350f9644f99fe7d8ec924ff0f0b9ab23f7 100644 --- a/crates/vim/src/normal/repeat.rs +++ b/crates/vim/src/normal/repeat.rs @@ -110,7 +110,24 @@ impl Replayer { } lock.running = true; let this = self.clone(); - window.defer(cx, move |window, cx| this.next(window, cx)) + window.defer(cx, move |window, cx| { + this.next(window, cx); + let Some(Some(workspace)) = window.root::() else { + return; + }; + let Some(editor) = workspace + .read(cx) + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + else { + return; + }; + editor.update(cx, |editor, cx| { + editor + .buffer() + .update(cx, |multi, cx| multi.finalize_last_transaction(cx)) + }); + }) } pub fn stop(self) { @@ -213,8 +230,19 @@ impl Vim { window: &mut Window, cx: &mut Context, ) { - let count = Vim::take_count(cx); + if self.active_operator().is_some() { + Vim::update_globals(cx, |globals, _| { + globals.recording_actions.clear(); + globals.recording_count = None; + globals.dot_recording = false; + globals.stop_recording_after_next_action = false; + }); + self.clear_operator(window, cx); + return; + } + Vim::take_forced_motion(cx); + let count = Vim::take_count(cx); let Some((mut actions, selection, mode)) = Vim::update_globals(cx, |globals, _| { let actions = globals.recorded_actions.clone(); @@ -793,4 +821,91 @@ mod test { cx.simulate_shared_keystrokes("@ b").await; cx.shared_state().await.assert_eq("aaaaaaabbbˇd"); } + + #[gpui::test] + async fn test_repeat_clear(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Check that, when repeat is preceded by something other than a number, + // the current operator is cleared, in order to prevent infinite loops. + cx.set_state("ˇhello world", Mode::Normal); + cx.simulate_keystrokes("d ."); + assert_eq!(cx.active_operator(), None); + } + + #[gpui::test] + async fn test_repeat_clear_repeat(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! { + "ˇthe quick brown + fox jumps over + the lazy dog" + }) + .await; + cx.simulate_shared_keystrokes("d d").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇfox jumps over + the lazy dog" + }); + cx.simulate_shared_keystrokes("d . .").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇthe lazy dog" + }); + } + + #[gpui::test] + async fn test_repeat_clear_count(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! { + "ˇthe quick brown + fox jumps over + the lazy dog" + }) + .await; + cx.simulate_shared_keystrokes("d d").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇfox jumps over + the lazy dog" + }); + cx.simulate_shared_keystrokes("2 d .").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇfox jumps over + the lazy dog" + }); + cx.simulate_shared_keystrokes(".").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇthe lazy dog" + }); + + cx.set_shared_state(indoc! { + "ˇthe quick brown + fox jumps over + the lazy dog + the quick brown + fox jumps over + the lazy dog" + }) + .await; + cx.simulate_shared_keystrokes("2 d d").await; + cx.shared_state().await.assert_eq(indoc! 
{ + "ˇthe lazy dog + the quick brown + fox jumps over + the lazy dog" + }); + cx.simulate_shared_keystrokes("5 d .").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇthe lazy dog + the quick brown + fox jumps over + the lazy dog" + }); + cx.simulate_shared_keystrokes(".").await; + cx.shared_state().await.assert_eq(indoc! { + "ˇfox jumps over + the lazy dog" + }); + } } diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index ff884e3b7393b39b86114338fe2af11e384e1fa0..73209c88735a59bb2dc5c2b73bb3ba0c7d03dd56 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -294,11 +294,10 @@ mod test { async fn test_scroll(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; - let (line_height, visible_line_count) = cx.editor(|editor, window, _cx| { + let (line_height, visible_line_count) = cx.update_editor(|editor, window, cx| { ( editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()), editor.visible_line_count().unwrap(), diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 2e80a08eb824b93783bf1249970e5e7ad7378ff2..36a529da5da4be4ea3437a766daa1bc18bcfdd68 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -1,6 +1,5 @@ -use editor::{Editor, EditorSettings, VimFlavor}; +use editor::{Editor, EditorSettings}; use gpui::{Action, Context, Window, actions}; - use language::Point; use schemars::JsonSchema; use search::{BufferSearchBar, SearchOptions, buffer_search}; @@ -196,7 +195,7 @@ impl Vim { prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode, - is_helix_regex_search: false, + helix_select: false, } }); } @@ -220,7 +219,7 @@ impl Vim { let new_selections = self.editor_selections(window, cx); let result = pane.update(cx, |pane, cx| { let search_bar = pane.toolbar().read(cx).item_of_type::()?; - if self.search.is_helix_regex_search { + if self.search.helix_select { search_bar.update(cx, |search_bar, cx| { search_bar.select_all_matches(&Default::default(), window, cx) }); @@ -241,8 +240,7 @@ impl Vim { count = count.saturating_sub(1) } self.search.count = 1; - let collapse = !self.mode.is_helix(); - search_bar.select_match(direction, count, collapse, window, cx); + search_bar.select_match(direction, count, window, cx); search_bar.focus_editor(&Default::default(), window, cx); let prior_selections: Vec<_> = self.search.prior_selections.drain(..).collect(); @@ -309,8 +307,7 @@ impl Vim { if !search_bar.has_active_match() || !search_bar.show(window, cx) { return false; } - let collapse = !self.mode.is_helix(); - search_bar.select_match(direction, count, collapse, window, cx); + search_bar.select_match(direction, count, window, cx); true }) }); @@ -319,7 +316,6 @@ impl Vim { } let new_selections = self.editor_selections(window, cx); - self.search_motion( Motion::ZedSearchResult { prior_selections, @@ -385,8 +381,7 @@ impl Vim { cx.spawn_in(window, async move |_, cx| { search.await?; search_bar.update_in(cx, |search_bar, window, cx| { - let collapse = editor::vim_flavor(cx) == Some(VimFlavor::Vim); - search_bar.select_match(direction, count, collapse, window, cx); + search_bar.select_match(direction, count, window, cx); vim.update(cx, |vim, cx| { let new_selections = vim.editor_selections(window, cx); @@ -449,7 +444,7 @@ impl Vim { cx.spawn_in(window, async move |_, cx| { search.await?; search_bar.update_in(cx, |search_bar, window, cx| { - search_bar.select_match(direction, 1, true, 
window, cx) + search_bar.select_match(direction, 1, window, cx) })?; anyhow::Ok(()) }) @@ -511,7 +506,12 @@ impl Vim { search_bar.is_contains_uppercase(&search), ); } else { - options.set(SearchOptions::CASE_SENSITIVE, false) + // Fallback: no explicit i/I flags and smartcase disabled; + // use global editor.search.case_sensitive. + options.set( + SearchOptions::CASE_SENSITIVE, + EditorSettings::get_global(cx).search.case_sensitive, + ) } if !replacement.flag_g { diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index d5a45fca544d61735f62a8f46e849db2c009847f..9920b8fc88d86625a1eb6642f59c894730905c77 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -11,7 +11,6 @@ use editor::{ClipboardSelection, Editor, SelectionEffects}; use gpui::Context; use gpui::Window; use language::Point; -use multi_buffer::MultiBufferRow; use settings::Settings; struct HighlightOnYank; @@ -81,7 +80,11 @@ impl Vim { start_positions.insert(selection.id, start_position); }); }); - vim.yank_selections_content(editor, MotionKind::Exclusive, window, cx); + let kind = match object.target_visual_mode(vim.mode, around) { + Mode::VisualLine => MotionKind::Linewise, + _ => MotionKind::Exclusive, + }; + vim.yank_selections_content(editor, kind, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.move_with(|_, selection| { let (head, goal) = start_positions.remove(&selection.id).unwrap(); @@ -194,11 +197,14 @@ impl Vim { if kind.linewise() { text.push('\n'); } - clipboard_selections.push(ClipboardSelection { - len: text.len() - initial_len, - is_entire_line: false, - first_line_indent: buffer.indent_size_for_line(MultiBufferRow(start.row)).len, - }); + clipboard_selections.push(ClipboardSelection::for_buffer( + text.len() - initial_len, + false, + start..end, + &buffer, + editor.project(), + cx, + )); } } @@ -223,7 +229,7 @@ impl Vim { editor.highlight_background::( &ranges_to_highlight, - |colors| colors.colors().editor_document_highlight_read_background, + |_, colors| colors.colors().editor_document_highlight_read_background, cx, ); cx.spawn(async move |this, cx| { diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index f361dd8f274879f067c49bf04c0a73ebbc34be06..02150332405c6d5ea4d5dd78f477348be968fddf 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -6,7 +6,7 @@ use crate::{ state::{Mode, Operator}, }; use editor::{ - Bias, DisplayPoint, Editor, ToOffset, + Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, ToOffset, display_map::{DisplaySnapshot, ToDisplayPoint}, movement::{self, FindRange}, }; @@ -81,8 +81,8 @@ pub struct CandidateRange { #[derive(Debug, Clone)] pub struct CandidateWithRanges { candidate: CandidateRange, - open_range: Range, - close_range: Range, + open_range: Range, + close_range: Range, } /// Selects text at the same indentation level. 
@@ -120,7 +120,7 @@ struct CurlyBrackets { opening: bool, } -fn cover_or_next, Range)>>( +fn cover_or_next, Range)>>( candidates: Option, caret: DisplayPoint, map: &DisplaySnapshot, @@ -128,7 +128,7 @@ fn cover_or_next, Range)>>( let caret_offset = caret.to_offset(map, Bias::Left); let mut covering = vec![]; let mut next_ones = vec![]; - let snapshot = &map.buffer_snapshot(); + let snapshot = map.buffer_snapshot(); if let Some(ranges) = candidates { for (open_range, close_range) in ranges { @@ -171,7 +171,7 @@ fn cover_or_next, Range)>>( if !next_ones.is_empty() { return next_ones.into_iter().min_by_key(|r| { let start = r.candidate.start.to_offset(map, Bias::Left); - (start as isize - caret_offset as isize).abs() + (start.0 as isize - caret_offset.0 as isize).abs() }); } @@ -181,8 +181,8 @@ fn cover_or_next, Range)>>( type DelimiterPredicate = dyn Fn(&BufferSnapshot, usize, usize) -> bool; struct DelimiterRange { - open: Range, - close: Range, + open: Range, + close: Range, } impl DelimiterRange { @@ -221,14 +221,14 @@ fn find_mini_delimiters( .buffer_snapshot() .bracket_ranges(visible_line_range) .map(|ranges| { - ranges.filter_map(move |(open, close)| { + ranges.filter_map(|(open, close)| { // Convert the ranges from multibuffer space to buffer space as // that is what `is_valid_delimiter` expects, otherwise it might // panic as the values might be out of bounds. let buffer_open = excerpt.map_range_to_buffer(open.clone()); let buffer_close = excerpt.map_range_to_buffer(close.clone()); - if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) { + if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) { Some((open, close)) } else { None @@ -252,8 +252,12 @@ fn find_mini_delimiters( Some( DelimiterRange { - open: open_bracket, - close: close_bracket, + open: excerpt.map_range_from_buffer( + BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end), + ), + close: excerpt.map_range_from_buffer( + BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end), + ), } .to_display_range(map, around), ) @@ -899,7 +903,7 @@ pub fn surrounding_html_tag( // Find the most closest to current offset let mut cursor = buffer.syntax_layer_at(offset)?.node().walk(); let mut last_child_node = cursor.node(); - while cursor.goto_first_child_for_byte(offset).is_some() { + while cursor.goto_first_child_for_byte(offset.0).is_some() { last_child_node = cursor.node(); } @@ -907,7 +911,7 @@ pub fn surrounding_html_tag( while let Some(cur_node) = last_child_node { if cur_node.child_count() >= 2 { let first_child = cur_node.child(0); - let last_child = cur_node.child(cur_node.child_count() - 1); + let last_child = cur_node.child(cur_node.child_count() as u32 - 1); if let (Some(first_child), Some(last_child)) = (first_child, last_child) { let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); @@ -916,10 +920,16 @@ pub fn surrounding_html_tag( - range.start.to_offset(map, Bias::Left) <= 1 { - offset <= last_child.end_byte() + offset.0 <= last_child.end_byte() } else { - range.start.to_offset(map, Bias::Left) >= first_child.start_byte() - && range.end.to_offset(map, Bias::Left) <= last_child.start_byte() + 1 + excerpt + .map_offset_to_buffer(range.start.to_offset(map, Bias::Left)) + .0 + >= first_child.start_byte() + && excerpt + .map_offset_to_buffer(range.end.to_offset(map, Bias::Left)) + .0 + <= last_child.start_byte() + 1 }; if open_tag.is_some() && open_tag == close_tag && 
is_valid { let range = if around { @@ -927,6 +937,7 @@ pub fn surrounding_html_tag( } else { first_child.byte_range().end..last_child.byte_range().start }; + let range = BufferOffset(range.start)..BufferOffset(range.end); if excerpt.contains_buffer_range(range.clone()) { let result = excerpt.map_range_from_buffer(range); return Some( @@ -1093,7 +1104,8 @@ fn text_object( .collect(); matches.sort_by_key(|r| r.end - r.start); if let Some(buffer_range) = matches.first() { - let range = excerpt.map_range_from_buffer(buffer_range.clone()); + let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); + let range = excerpt.map_range_from_buffer(buffer_range); return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); } @@ -1113,10 +1125,12 @@ fn text_object( if let Some(buffer_range) = matches.first() && !buffer_range.is_empty() { - let range = excerpt.map_range_from_buffer(buffer_range.clone()); + let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); + let range = excerpt.map_range_from_buffer(buffer_range); return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); } - let buffer_range = excerpt.map_range_from_buffer(around_range.clone()); + let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end); + let buffer_range = excerpt.map_range_from_buffer(around_range); return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map)); } @@ -1134,9 +1148,9 @@ fn argument( fn comma_delimited_range_at( buffer: &BufferSnapshot, - mut offset: usize, + mut offset: BufferOffset, include_comma: bool, - ) -> Option> { + ) -> Option> { // Seek to the first non-whitespace character offset += buffer .chars_at(offset) @@ -1151,7 +1165,7 @@ fn argument( } // If the cursor is outside the brackets, ignore them - if open.start == offset || close.end == offset { + if open.start == offset.0 || close.end == offset.0 { return false; } @@ -1167,7 +1181,7 @@ fn argument( let (open_bracket, close_bracket) = buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?; - let inner_bracket_range = open_bracket.end..close_bracket.start; + let inner_bracket_range = BufferOffset(open_bracket.end)..BufferOffset(close_bracket.start); let layer = buffer.syntax_layer_at(offset)?; let node = layer.node(); @@ -1186,7 +1200,7 @@ fn argument( parent_covers_bracket_range = covers_bracket_range; // Unable to find a child node with a parent that covers the bracket range, so no argument to select - cursor.goto_first_child_for_byte(offset)?; + cursor.goto_first_child_for_byte(offset.0)?; } let mut argument_node = cursor.node(); @@ -1256,7 +1270,7 @@ fn argument( } } - Some(start..end) + Some(BufferOffset(start)..BufferOffset(end)) } let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?; @@ -1387,7 +1401,7 @@ fn is_possible_sentence_start(character: char) -> bool { const SENTENCE_END_PUNCTUATION: &[char] = &['.', '!', '?']; const SENTENCE_END_FILLERS: &[char] = &[')', ']', '"', '\'']; const SENTENCE_END_WHITESPACE: &[char] = &[' ', '\t', '\n']; -fn is_sentence_end(map: &DisplaySnapshot, offset: usize) -> bool { +fn is_sentence_end(map: &DisplaySnapshot, offset: MultiBufferOffset) -> bool { let mut next_chars = map.buffer_chars_at(offset).peekable(); if let Some((char, _)) = next_chars.next() { // We are at a double newline. This position is a sentence end. 
@@ -2368,9 +2382,10 @@ mod test { Mode::Insert, ); - cx.set_state("let a = (test::call(), 'p', my_macro!{ˇ});", Mode::Normal); - cx.simulate_keystrokes("c a a"); - cx.assert_state("let a = (test::call(), 'p'ˇ);", Mode::Insert); + // TODO regressed with the up-to-date Rust grammar. + // cx.set_state("let a = (test::call(), 'p', my_macro!{ˇ});", Mode::Normal); + // cx.simulate_keystrokes("c a a"); + // cx.assert_state("let a = (test::call(), 'p'ˇ);", Mode::Insert); cx.set_state("let a = [test::call(ˇ), 300];", Mode::Normal); cx.simulate_keystrokes("c i a"); @@ -2792,9 +2807,8 @@ mod test { for (keystrokes, initial_state, expected_state, expected_mode) in TEST_CASES { cx.set_state(initial_state, Mode::Normal); - + cx.buffer(|buffer, _| buffer.parsing_idle()).await; cx.simulate_keystrokes(keystrokes); - cx.assert_state(expected_state, *expected_mode); } @@ -2815,9 +2829,8 @@ mod test { for (keystrokes, initial_state, mode) in INVALID_CASES { cx.set_state(initial_state, Mode::Normal); - + cx.buffer(|buffer, _| buffer.parsing_idle()).await; cx.simulate_keystrokes(keystrokes); - cx.assert_state(initial_state, *mode); } } @@ -3170,9 +3183,8 @@ mod test { for (keystrokes, initial_state, expected_state, expected_mode) in TEST_CASES { cx.set_state(initial_state, Mode::Normal); - + cx.buffer(|buffer, _| buffer.parsing_idle()).await; cx.simulate_keystrokes(keystrokes); - cx.assert_state(expected_state, *expected_mode); } @@ -3193,9 +3205,8 @@ mod test { for (keystrokes, initial_state, mode) in INVALID_CASES { cx.set_state(initial_state, Mode::Normal); - + cx.buffer(|buffer, _| buffer.parsing_idle()).await; cx.simulate_keystrokes(keystrokes); - cx.assert_state(initial_state, *mode); } } diff --git a/crates/vim/src/replace.rs b/crates/vim/src/replace.rs index 93c30141daeac21805e8ea1aab610988a09a9635..63d452f84bfd5ee1cea8970698962169dc8fe94a 100644 --- a/crates/vim/src/replace.rs +++ b/crates/vim/src/replace.rs @@ -273,7 +273,7 @@ impl Vim { let ranges = [new_range]; editor.highlight_background::( &ranges, - |theme| theme.colors().editor_document_highlight_read_background, + |_, theme| theme.colors().editor_document_highlight_read_background, cx, ); } diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 3f4fc99584f96754afc5342d299a502eb9a3dbad..2a8aa91063be89ebd616a2f9601f90c912cee8b5 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -38,8 +38,9 @@ use util::rel_path::RelPath; use workspace::searchable::Direction; use workspace::{Workspace, WorkspaceDb, WorkspaceId}; -#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)] +#[derive(Clone, Copy, Default, Debug, PartialEq, Serialize, Deserialize)] pub enum Mode { + #[default] Normal, Insert, Replace, @@ -66,22 +67,12 @@ impl Display for Mode { } impl Mode { - pub fn is_visual(self) -> bool { + pub fn is_visual(&self) -> bool { match self { Self::Visual | Self::VisualLine | Self::VisualBlock | Self::HelixSelect => true, Self::Normal | Self::Insert | Self::Replace | Self::HelixNormal => false, } } - - pub fn is_helix(self) -> bool { - matches!(self, Mode::HelixNormal | Mode::HelixSelect) - } -} - -impl Default for Mode { - fn default() -> Self { - Self::Normal - } } #[derive(Clone, Debug, PartialEq)] @@ -226,6 +217,7 @@ pub struct VimGlobals { pub forced_motion: bool, pub stop_recording_after_next_action: bool, pub ignore_current_insertion: bool, + pub recording_count: Option, pub recorded_count: Option, pub recording_actions: Vec, pub recorded_actions: Vec, @@ -558,6 +550,10 @@ impl MarksState { let buffer = 
multibuffer.read(cx).as_singleton(); let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx)); + if self.is_global_mark(&name) && self.global_marks.contains_key(&name) { + self.delete_mark(name.clone(), multibuffer, cx); + } + let Some(abs_path) = abs_path else { self.multibuffer_marks .entry(multibuffer.entity_id()) @@ -581,7 +577,7 @@ impl MarksState { let buffer_id = buffer.read(cx).remote_id(); self.buffer_marks.entry(buffer_id).or_default().insert( - name, + name.clone(), anchors .into_iter() .map(|anchor| anchor.text_anchor) @@ -590,6 +586,10 @@ impl MarksState { if !self.watched_buffers.contains_key(&buffer_id) { self.watch_buffer(MarkLocation::Path(abs_path.clone()), &buffer, cx) } + if self.is_global_mark(&name) { + self.global_marks + .insert(name, MarkLocation::Path(abs_path.clone())); + } self.serialize_buffer_marks(abs_path, &buffer, cx) } @@ -614,7 +614,7 @@ impl MarksState { let text_anchors = anchors.get(name)?; let anchors = text_anchors .iter() - .map(|anchor| Anchor::in_buffer(excerpt_id, buffer_id, *anchor)) + .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor)) .collect(); return Some(Mark::Local(anchors)); } @@ -907,6 +907,7 @@ impl VimGlobals { if self.stop_recording_after_next_action { self.dot_recording = false; self.recorded_actions = std::mem::take(&mut self.recording_actions); + self.recorded_count = self.recording_count.take(); self.stop_recording_after_next_action = false; } } @@ -933,6 +934,7 @@ impl VimGlobals { if self.stop_recording_after_next_action { self.dot_recording = false; self.recorded_actions = std::mem::take(&mut self.recording_actions); + self.recorded_count = self.recording_count.take(); self.stop_recording_after_next_action = false; } } @@ -996,7 +998,7 @@ pub struct SearchState { pub prior_selections: Vec>, pub prior_operator: Option, pub prior_mode: Mode, - pub is_helix_regex_search: bool, + pub helix_select: bool, } impl Operator { diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index bc817e2d4871a0be07e8c100b332f5630dcec711..b3f9307aac3df18334cf24a619dc640ccb625e24 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -4,7 +4,7 @@ use crate::{ object::{Object, surrounding_markers}, state::Mode, }; -use editor::{Bias, movement}; +use editor::{Bias, MultiBufferOffset, movement}; use gpui::{Context, Window}; use language::BracketPair; @@ -175,7 +175,7 @@ impl Vim { while let Some((ch, offset)) = chars_and_offset.next() { if ch.to_string() == pair.start { let start = offset; - let mut end = start + 1; + let mut end = start + 1usize; if surround && let Some((next_ch, _)) = chars_and_offset.peek() && next_ch.eq(&' ') @@ -193,7 +193,7 @@ impl Vim { while let Some((ch, offset)) = reverse_chars_and_offsets.next() { if ch.to_string() == pair.end { let mut start = offset; - let end = start + 1; + let end = start + 1usize; if surround && let Some((next_ch, _)) = reverse_chars_and_offsets.peek() && next_ch.eq(&' ') @@ -282,6 +282,7 @@ impl Vim { // that the end replacement string does not exceed // this value. Helpful when dealing with newlines. 
let mut edit_len = 0; + let mut open_range_end = MultiBufferOffset(0); let mut chars_and_offset = display_map .buffer_chars_at(range.start.to_offset(&display_map, Bias::Left)) .peekable(); @@ -290,11 +291,11 @@ impl Vim { if ch.to_string() == will_replace_pair.start { let mut open_str = pair.start.clone(); let start = offset; - let mut end = start + 1; + open_range_end = start + 1usize; while let Some((next_ch, _)) = chars_and_offset.next() - && next_ch.to_string() == " " + && next_ch == ' ' { - end += 1; + open_range_end += 1; if preserve_space { open_str.push(next_ch); @@ -305,8 +306,8 @@ impl Vim { open_str.push(' '); }; - edit_len = end - start; - edits.push((start..end, open_str)); + edit_len = open_range_end - start; + edits.push((start..open_range_end, open_str)); anchors.push(start..start); break; } @@ -321,10 +322,11 @@ impl Vim { if ch.to_string() == will_replace_pair.end { let mut close_str = String::new(); let mut start = offset; - let end = start + 1; + let end = start + 1usize; while let Some((next_ch, _)) = reverse_chars_and_offsets.next() - && next_ch.to_string() == " " + && next_ch == ' ' && close_str.len() < edit_len - 1 + && start > open_range_end { start -= 1; @@ -1236,6 +1238,23 @@ mod test { Mode::Normal, ); + // test spaces with quote change surrounds + cx.set_state( + indoc! {" + fn test_surround() { + \"ˇ \" + };"}, + Mode::Normal, + ); + cx.simulate_keystrokes("c s \" '"); + cx.assert_state( + indoc! {" + fn test_surround() { + ˇ' ' + };"}, + Mode::Normal, + ); + // Currently, the same test case but using the closing bracket `]` // actually removes a whitespace before the closing bracket, something // that might need to be fixed? diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index cb02a3ab0fafdeec254e8b3722bdd877fbeda0e2..4c61479157268e4f0276bddf9dd1eb913284d27e 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -7,7 +7,7 @@ use std::{sync::Arc, time::Duration}; use collections::HashMap; use command_palette::CommandPalette; use editor::{ - AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, + AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, MultiBufferOffset, actions::{DeleteLine, WrapSelectionsInTag}, code_context_menus::CodeContextMenu, display_map::DisplayRow, @@ -16,7 +16,7 @@ use editor::{ use futures::StreamExt; use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px}; use itertools::Itertools; -use language::{Language, LanguageConfig, Point}; +use language::{CursorShape, Language, LanguageConfig, Point}; pub use neovim_backed_test_context::*; use settings::SettingsStore; use ui::Pixels; @@ -908,6 +908,9 @@ fn assert_pending_input(cx: &mut VimTestContext, expected: &str) { .map(|highlight| highlight.to_offset(&snapshot.buffer_snapshot())) .collect::>(), ranges + .iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) + .collect::>() ) }); } @@ -967,7 +970,7 @@ async fn test_jk_delay(cx: &mut gpui::TestAppContext) { .iter() .map(|highlight| highlight.to_offset(&snapshot.buffer_snapshot())) .collect::>(), - vec![0..1] + vec![MultiBufferOffset(0)..MultiBufferOffset(1)] ) }); cx.executor().advance_clock(Duration::from_millis(500)); @@ -1139,6 +1142,26 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { cx.assert_state("const afterˇ = 2; console.log(after)", Mode::Normal) } +#[gpui::test] +async fn test_go_to_definition(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new_typescript(cx).await; + + cx.set_state("const before = 2; 
console.log(beforˇe)", Mode::Normal); + let def_range = cx.lsp_range("const «beforeˇ» = 2; console.log(before)"); + let mut go_to_request = + cx.set_request_handler::(move |url, _, _| async move { + Ok(Some(lsp::GotoDefinitionResponse::Scalar( + lsp::Location::new(url.clone(), def_range), + ))) + }); + + cx.simulate_keystrokes("g d"); + go_to_request.next().await.unwrap(); + cx.run_until_parked(); + + cx.assert_state("const ˇbefore = 2; console.log(before)", Mode::Normal); +} + #[perf] #[gpui::test] async fn test_remap(cx: &mut gpui::TestAppContext) { @@ -2230,6 +2253,79 @@ async fn test_paragraph_multi_delete(cx: &mut gpui::TestAppContext) { cx.shared_state().await.assert_eq(indoc! {"ˇ"}); } +#[perf] +#[gpui::test] +async fn test_yank_paragraph_with_paste(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! { + " + first paragraph + ˇstill first + + second paragraph + still second + + third paragraph + " + }) + .await; + + cx.simulate_shared_keystrokes("y a p").await; + cx.shared_clipboard() + .await + .assert_eq("first paragraph\nstill first\n\n"); + + cx.simulate_shared_keystrokes("j j p").await; + cx.shared_state().await.assert_eq(indoc! { + " + first paragraph + still first + + ˇfirst paragraph + still first + + second paragraph + still second + + third paragraph + " + }); +} + +#[perf] +#[gpui::test] +async fn test_change_paragraph(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! { + " + first paragraph + ˇstill first + + second paragraph + still second + + third paragraph + " + }) + .await; + + cx.simulate_shared_keystrokes("c a p").await; + cx.shared_clipboard() + .await + .assert_eq("first paragraph\nstill first\n\n"); + + cx.simulate_shared_keystrokes("escape").await; + cx.shared_state().await.assert_eq(indoc! { + " + ˇ + second paragraph + still second + + third paragraph + " + }); +} + #[perf] #[gpui::test] async fn test_multi_cursor_replay(cx: &mut gpui::TestAppContext) { @@ -2303,7 +2399,7 @@ async fn test_clipping_on_mode_change(cx: &mut gpui::TestAppContext) { .end; editor.last_bounds().unwrap().origin + editor - .display_to_pixel_point(current_head, &snapshot, window) + .display_to_pixel_point(current_head, &snapshot, window, cx) .unwrap() }); pixel_position.x += px(100.); @@ -2365,3 +2461,43 @@ async fn test_wrap_selections_in_tag_line_mode(cx: &mut gpui::TestAppContext) { Mode::VisualLine, ); } + +#[gpui::test] +async fn test_repeat_grouping_41735(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + // typically transaction gropuing is disabled in tests, but here we need to test it. + cx.update_buffer(|buffer, _cx| buffer.set_group_interval(Duration::from_millis(300))); + + cx.set_shared_state("ˇ").await; + + cx.simulate_shared_keystrokes("i a escape").await; + cx.simulate_shared_keystrokes(". . 
.").await; + cx.shared_state().await.assert_eq("ˇaaaa"); + cx.simulate_shared_keystrokes("u").await; + cx.shared_state().await.assert_eq("ˇaaa"); +} + +#[gpui::test] +async fn test_deactivate(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.cursor_shape = Some(settings::CursorShape::Underline); + }); + }); + + // Assert that, while in `Normal` mode, the cursor shape is `Block` but, + // after deactivating vim mode, it should revert to the one specified in the + // user's settings, if set. + cx.update_editor(|editor, _window, _cx| { + assert_eq!(editor.cursor_shape(), CursorShape::Block); + }); + + cx.disable_vim(); + + cx.update_editor(|editor, _window, _cx| { + assert_eq!(editor.cursor_shape(), CursorShape::Underline); + }); +} diff --git a/crates/vim/src/test/neovim_backed_test_context.rs b/crates/vim/src/test/neovim_backed_test_context.rs index 9d2452ab20a6a99138c4b0d86f597f084a0876d6..d20464ccc4b36c8f7024db6bd63558a6292e7c68 100644 --- a/crates/vim/src/test/neovim_backed_test_context.rs +++ b/crates/vim/src/test/neovim_backed_test_context.rs @@ -31,6 +31,7 @@ pub struct SharedState { } impl SharedState { + /// Assert that both Zed and NeoVim have the same content and mode. #[track_caller] pub fn assert_matches(&self) { if self.neovim != self.editor || self.neovim_mode != self.editor_mode { @@ -183,6 +184,26 @@ impl NeovimBackedTestContext { } } + pub async fn new_markdown_with_rust(cx: &mut gpui::TestAppContext) -> NeovimBackedTestContext { + #[cfg(feature = "neovim")] + cx.executor().allow_parking(); + let thread = thread::current(); + let test_name = thread + .name() + .expect("thread is not named") + .split(':') + .next_back() + .unwrap() + .to_string(); + Self { + cx: VimTestContext::new_markdown_with_rust(cx).await, + neovim: NeovimConnection::new(test_name).await, + + last_set_state: None, + recent_keystrokes: Default::default(), + } + } + pub async fn new_typescript(cx: &mut gpui::TestAppContext) -> NeovimBackedTestContext { #[cfg(feature = "neovim")] cx.executor().allow_parking(); @@ -283,11 +304,10 @@ impl NeovimBackedTestContext { self.neovim.set_option(&format!("scrolloff={}", 3)).await; // +2 to account for the vim command UI at the bottom. 
self.neovim.set_option(&format!("lines={}", rows + 2)).await; - let (line_height, visible_line_count) = self.editor(|editor, window, _cx| { + let (line_height, visible_line_count) = self.update_editor(|editor, window, cx| { ( editor - .style() - .unwrap() + .style(cx) .text .line_height_in_pixels(window.rem_size()), editor.visible_line_count().unwrap(), diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 6300e3a3fcc079e064ef0e26c3e218b4032aa890..2d5ed4227dcc263f56cfa0bcb337f5673df8ef3c 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -1,8 +1,9 @@ use std::ops::{Deref, DerefMut}; use editor::test::editor_lsp_test_context::EditorLspTestContext; -use gpui::{Context, Entity, SemanticVersion, UpdateGlobal}; +use gpui::{Context, Entity, UpdateGlobal}; use search::{BufferSearchBar, project_search::ProjectSearchBar}; +use semver::Version; use crate::{state::Operator, *}; @@ -19,14 +20,16 @@ impl VimTestContext { cx.update(|cx| { let settings = SettingsStore::test(cx); cx.set_global(settings); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(Version::new(0, 0, 0), cx); command_palette::init(cx); project_panel::init(cx); + outline_panel::init(cx); git_ui::init(cx); crate::init(cx); search::init(cx); theme::init(theme::LoadThemes::JustBase, cx); settings_ui::init(cx); + markdown_preview::init(cx); }); } @@ -41,6 +44,11 @@ impl VimTestContext { Self::new_with_lsp(EditorLspTestContext::new_html(cx).await, true) } + pub async fn new_markdown_with_rust(cx: &mut gpui::TestAppContext) -> VimTestContext { + Self::init(cx); + Self::new_with_lsp(EditorLspTestContext::new_markdown_with_rust(cx).await, true) + } + pub async fn new_typescript(cx: &mut gpui::TestAppContext) -> VimTestContext { Self::init(cx); Self::new_with_lsp( @@ -54,6 +62,7 @@ impl VimTestContext { prepare_provider: Some(true), work_done_progress_options: Default::default(), })), + definition_provider: Some(lsp::OneOf::Left(true)), ..Default::default() }, cx, diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6ffdbcce910c10229dc7c2e6df95055c5c812f28..26fec968fb261fbb80a9f84211357623147ca0f4 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -19,10 +19,11 @@ mod state; mod surrounds; mod visual; +use crate::normal::paste::Paste as VimPaste; use collections::HashMap; use editor::{ - Anchor, Bias, Editor, EditorEvent, EditorSettings, HideMouseCursorOrigin, SelectionEffects, - ToPoint, + Anchor, Bias, Editor, EditorEvent, EditorSettings, HideMouseCursorOrigin, MultiBufferOffset, + SelectionEffects, ToPoint, actions::Paste, movement::{self, FindRange}, }; @@ -183,8 +184,6 @@ actions!( InnerObject, /// Maximizes the current pane. MaximizePane, - /// Opens the default keymap file. - OpenDefaultKeymap, /// Resets all pane sizes to default. ResetPaneSizes, /// Resizes the pane to the right. 
@@ -314,7 +313,7 @@ pub fn init(cx: &mut App) { workspace.register_action(|_, _: &ToggleProjectPanelFocus, window, cx| { if Vim::take_count(cx).is_none() { - window.dispatch_action(project_panel::ToggleFocus.boxed_clone(), cx); + window.dispatch_action(zed_actions::project_panel::ToggleFocus.boxed_clone(), cx); } }); @@ -343,7 +342,7 @@ pub fn init(cx: &mut App) { }; }); - workspace.register_action(|_, _: &OpenDefaultKeymap, _, cx| { + workspace.register_action(|_, _: &zed_actions::vim::OpenDefaultKeymap, _, cx| { cx.emit(workspace::Event::OpenBundledFile { text: settings::vim_keymap(), title: "Default Vim Bindings", @@ -924,6 +923,10 @@ impl Vim { cx, |vim, _: &editor::actions::Paste, window, cx| match vim.mode { Mode::Replace => vim.paste_replace(window, cx), + Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { + vim.selected_register.replace('+'); + vim.paste(&VimPaste::default(), window, cx); + } _ => { vim.update_editor(cx, |_, editor, cx| editor.paste(&Paste, window, cx)); } @@ -952,8 +955,14 @@ impl Vim { } fn deactivate(editor: &mut Editor, cx: &mut Context) { - editor.set_cursor_shape(CursorShape::Bar, cx); + editor.set_cursor_shape( + EditorSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(), + cx, + ); editor.set_clip_at_line_ends(false, cx); + editor.set_collapse_matches(false); editor.set_input_enabled(true); editor.set_autoindent(true); editor.selections.set_line_mode(false); @@ -1255,7 +1264,7 @@ impl Vim { }; if global_state.dot_recording { - global_state.recorded_count = count; + global_state.recording_count = count; } count } @@ -1389,7 +1398,7 @@ impl Vim { let newest_selection_empty = editor.update(cx, |editor, cx| { editor .selections - .newest::(&editor.display_snapshot(cx)) + .newest::(&editor.display_snapshot(cx)) .is_empty() }); let editor = editor.read(cx); @@ -1489,7 +1498,7 @@ impl Vim { let snapshot = &editor.snapshot(window, cx); let selection = editor .selections - .newest::(&snapshot.display_snapshot); + .newest::(&snapshot.display_snapshot); let snapshot = snapshot.buffer_snapshot(); let (range, kind) = @@ -1513,7 +1522,7 @@ impl Vim { if !globals.dot_replaying { globals.dot_recording = true; globals.recording_actions = Default::default(); - globals.recorded_count = None; + globals.recording_count = None; let selections = self.editor().map(|editor| { editor.update(cx, |editor, cx| { @@ -1583,6 +1592,7 @@ impl Vim { .recording_actions .push(ReplayableAction::Action(action.boxed_clone())); globals.recorded_actions = mem::take(&mut globals.recording_actions); + globals.recorded_count = globals.recording_count.take(); globals.dot_recording = false; globals.stop_recording_after_next_action = false; } @@ -1929,8 +1939,11 @@ impl Vim { self.update_editor(cx, |vim, editor, cx| { editor.set_cursor_shape(vim.cursor_shape(cx), cx); editor.set_clip_at_line_ends(vim.clip_at_line_ends(), cx); + let collapse_matches = !HelixModeSetting::get_global(cx).0; + editor.set_collapse_matches(collapse_matches); editor.set_input_enabled(vim.editor_input_enabled()); editor.set_autoindent(vim.should_autoindent()); + editor.set_cursor_offset_on_selection(vim.mode.is_visual()); editor .selections .set_line_mode(matches!(vim.mode, Mode::VisualLine)); diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 498c4b4dc6ec6ad8af4f47bb6ea5044a5fcd3c0a..3c6f237435e3924a907e059ed1a878641c287e7e 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use collections::HashMap; use editor::{ - Bias, DisplayPoint, 
Editor, SelectionEffects, + Bias, DisplayPoint, Editor, MultiBufferOffset, SelectionEffects, display_map::{DisplaySnapshot, ToDisplayPoint}, movement, }; @@ -371,10 +371,12 @@ impl Vim { loop { let laid_out_line = map.layout_row(row, &text_layout_details); - let start = - DisplayPoint::new(row, laid_out_line.index_for_x(positions.start) as u32); + let start = DisplayPoint::new( + row, + laid_out_line.closest_index_for_x(positions.start) as u32, + ); let mut end = - DisplayPoint::new(row, laid_out_line.index_for_x(positions.end) as u32); + DisplayPoint::new(row, laid_out_line.closest_index_for_x(positions.end) as u32); if end <= start { if start.column() == map.line_len(start.row()) { end = start; @@ -776,7 +778,7 @@ impl Vim { { let range = row_range.start.to_offset(&display_map, Bias::Right) ..row_range.end.to_offset(&display_map, Bias::Right); - let text = text.repeat(range.len()); + let text = text.repeat(range.end - range.start); edits.push((range, text)); } } @@ -842,9 +844,12 @@ impl Vim { return; }; let vim_is_normal = self.mode == Mode::Normal; - let mut start_selection = 0usize; - let mut end_selection = 0usize; + let mut start_selection = MultiBufferOffset(0); + let mut end_selection = MultiBufferOffset(0); + self.update_editor(cx, |_, editor, _| { + editor.set_collapse_matches(false); + }); if vim_is_normal { pane.update(cx, |pane, cx| { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() @@ -855,7 +860,7 @@ impl Vim { } // without update_match_index there is a bug when the cursor is before the first match search_bar.update_match_index(window, cx); - search_bar.select_match(direction.opposite(), 1, false, window, cx); + search_bar.select_match(direction.opposite(), 1, window, cx); }); } }); @@ -863,7 +868,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let latest = editor .selections - .newest::(&editor.display_snapshot(cx)); + .newest::(&editor.display_snapshot(cx)); start_selection = latest.start; end_selection = latest.end; }); @@ -873,7 +878,7 @@ impl Vim { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() { search_bar.update(cx, |search_bar, cx| { search_bar.update_match_index(window, cx); - search_bar.select_match(direction, count, false, window, cx); + search_bar.select_match(direction, count, window, cx); match_exists = search_bar.match_exists(window, cx); }); } @@ -886,7 +891,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let latest = editor .selections - .newest::(&editor.display_snapshot(cx)); + .newest::(&editor.display_snapshot(cx)); if vim_is_normal { start_selection = latest.start; end_selection = latest.end; @@ -900,6 +905,7 @@ impl Vim { editor.change_selections(Default::default(), window, cx, |s| { s.select_ranges([start_selection..end_selection]); }); + editor.set_collapse_matches(true); }); match self.maybe_pop_operator() { diff --git a/crates/vim/test_data/test_change_paragraph.json b/crates/vim/test_data/test_change_paragraph.json new file mode 100644 index 0000000000000000000000000000000000000000..6d235d9f367d5c375df59f3567b2ac1435f6a0a7 --- /dev/null +++ b/crates/vim/test_data/test_change_paragraph.json @@ -0,0 +1,8 @@ +{"Put":{"state":"first paragraph\nˇstill first\n\nsecond paragraph\nstill second\n\nthird paragraph\n"}} +{"Key":"c"} +{"Key":"a"} +{"Key":"p"} +{"Get":{"state":"ˇ\nsecond paragraph\nstill second\n\nthird paragraph\n","mode":"Insert"}} +{"ReadRegister":{"name":"\"","value":"first paragraph\nstill first\n\n"}} +{"Key":"escape"} +{"Get":{"state":"ˇ\nsecond paragraph\nstill second\n\nthird 
paragraph\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_matching_tags.json b/crates/vim/test_data/test_matching_tags.json index bb4f5fd450dee78319a23e8026b2cb1c4d224b19..b401033a941f201ddcf9c3a4128659ae27d787b4 100644 --- a/crates/vim/test_data/test_matching_tags.json +++ b/crates/vim/test_data/test_matching_tags.json @@ -13,3 +13,8 @@ {"Put":{"state":"\n \n"}} {"Key":"%"} {"Get":{"state":"\n ˇ\n","mode":"Normal"}} +{"Put":{"state":"\n \n \n"}} +{"Key":"%"} +{"Get":{"state":"\n \n <ˇ/body>\n","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"\n <ˇbody>\n \n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_repeat_clear_count.json b/crates/vim/test_data/test_repeat_clear_count.json new file mode 100644 index 0000000000000000000000000000000000000000..352c6ca4a8d2ee0534d3b695e2eb36ad26bc62d8 --- /dev/null +++ b/crates/vim/test_data/test_repeat_clear_count.json @@ -0,0 +1,21 @@ +{"Put":{"state":"ˇthe quick brown\nfox jumps over\nthe lazy dog"}} +{"Key":"d"} +{"Key":"d"} +{"Get":{"state":"ˇfox jumps over\nthe lazy dog","mode":"Normal"}} +{"Key":"2"} +{"Key":"d"} +{"Key":"."} +{"Get":{"state":"ˇfox jumps over\nthe lazy dog","mode":"Normal"}} +{"Key":"."} +{"Get":{"state":"ˇthe lazy dog","mode":"Normal"}} +{"Put":{"state":"ˇthe quick brown\nfox jumps over\nthe lazy dog\nthe quick brown\nfox jumps over\nthe lazy dog"}} +{"Key":"2"} +{"Key":"d"} +{"Key":"d"} +{"Get":{"state":"ˇthe lazy dog\nthe quick brown\nfox jumps over\nthe lazy dog","mode":"Normal"}} +{"Key":"5"} +{"Key":"d"} +{"Key":"."} +{"Get":{"state":"ˇthe lazy dog\nthe quick brown\nfox jumps over\nthe lazy dog","mode":"Normal"}} +{"Key":"."} +{"Get":{"state":"ˇfox jumps over\nthe lazy dog","mode":"Normal"}} diff --git a/crates/vim/test_data/test_repeat_clear_repeat.json b/crates/vim/test_data/test_repeat_clear_repeat.json new file mode 100644 index 0000000000000000000000000000000000000000..39d96e2a3759d75994e24e6ad80a3ef00b64259b --- /dev/null +++ b/crates/vim/test_data/test_repeat_clear_repeat.json @@ -0,0 +1,8 @@ +{"Put":{"state":"ˇthe quick brown\nfox jumps over\nthe lazy dog"}} +{"Key":"d"} +{"Key":"d"} +{"Get":{"state":"ˇfox jumps over\nthe lazy dog","mode":"Normal"}} +{"Key":"d"} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"ˇthe lazy dog","mode":"Normal"}} diff --git a/crates/vim/test_data/test_repeat_grouping_41735.json b/crates/vim/test_data/test_repeat_grouping_41735.json new file mode 100644 index 0000000000000000000000000000000000000000..6523be6e4bebad7162a15da1af3455394abdfe12 --- /dev/null +++ b/crates/vim/test_data/test_repeat_grouping_41735.json @@ -0,0 +1,10 @@ +{"Put":{"state":"ˇ"}} +{"Key":"i"} +{"Key":"a"} +{"Key":"escape"} +{"Key":"."} +{"Key":"."} +{"Key":"."} +{"Get":{"state":"ˇaaaa","mode":"Normal"}} +{"Key":"u"} +{"Get":{"state":"ˇaaa","mode":"Normal"}} diff --git a/crates/vim/test_data/test_temporary_mode.json b/crates/vim/test_data/test_temporary_mode.json new file mode 100644 index 0000000000000000000000000000000000000000..be370cf744f9fbd9bfed0a89a6db5ef7b6d568ad --- /dev/null +++ b/crates/vim/test_data/test_temporary_mode.json @@ -0,0 +1,27 @@ +{"Put":{"state":"lorem ˇipsum"}} +{"Key":"i"} +{"Get":{"state":"lorem ˇipsum","mode":"Insert"}} +{"Key":"ctrl-o"} +{"Key":"$"} +{"Get":{"state":"lorem ipsumˇ","mode":"Insert"}} +{"Put":{"state":"loremˇ ipsum dolor"}} +{"Key":"a"} +{"Get":{"state":"lorem ˇipsum dolor","mode":"Insert"}} +{"Key":"a"} +{"Key":"n"} +{"Key":"d"} +{"Key":"space"} +{"Key":"ctrl-o"} +{"Key":"w"} +{"Get":{"state":"lorem and ipsum ˇdolor","mode":"Insert"}} 
+{"Put":{"state":"lorem ˇipsum dolor"}} +{"Key":"i"} +{"Get":{"state":"lorem ˇipsum dolor","mode":"Insert"}} +{"Key":"a"} +{"Key":"n"} +{"Key":"d"} +{"Key":"space"} +{"Key":"ctrl-o"} +{"Key":"y"} +{"Key":"$"} +{"Get":{"state":"lorem and ˇipsum dolor","mode":"Insert"}} diff --git a/crates/vim/test_data/test_unmatched_backward_markdown.json b/crates/vim/test_data/test_unmatched_backward_markdown.json new file mode 100644 index 0000000000000000000000000000000000000000..c2df848b812e1685c39b7b8c353401493cc5a4be --- /dev/null +++ b/crates/vim/test_data/test_unmatched_backward_markdown.json @@ -0,0 +1,9 @@ +{"Exec":{"command":"set filetype=markdown"}} +{"Put":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\nˇ }\n}\n```\n"}} +{"Key":"["} +{"Key":"{"} +{"Get":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> ˇ{\n }\n}\n```\n","mode":"Normal"}} +{"Put":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\n } ˇ\n}\n```\n"}} +{"Key":"["} +{"Key":"{"} +{"Get":{"state":"```rs\nimpl Worktree ˇ{\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\n } \n}\n```\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_unmatched_forward_markdown.json b/crates/vim/test_data/test_unmatched_forward_markdown.json new file mode 100644 index 0000000000000000000000000000000000000000..753f68d04fb458891de915134b5da8219742c06f --- /dev/null +++ b/crates/vim/test_data/test_unmatched_forward_markdown.json @@ -0,0 +1,9 @@ +{"Exec":{"command":"set filetype=markdown"}} +{"Put":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\nˇ }\n}\n```\n"}} +{"Key":"]"} +{"Key":"}"} +{"Get":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\n ˇ}\n}\n```\n","mode":"Normal"}} +{"Put":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\n } ˇ\n}\n```\n"}} +{"Key":"]"} +{"Key":"}"} +{"Get":{"state":"```rs\nimpl Worktree {\n pub async fn open_buffers(&self, path: &Path) -> impl Iterator<&Buffer> {\n } \nˇ}\n```\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_yank_paragraph_with_paste.json b/crates/vim/test_data/test_yank_paragraph_with_paste.json new file mode 100644 index 0000000000000000000000000000000000000000..d73d1f6d3b36e7b1df17559dd525238f13606976 --- /dev/null +++ b/crates/vim/test_data/test_yank_paragraph_with_paste.json @@ -0,0 +1,10 @@ +{"Put":{"state":"first paragraph\nˇstill first\n\nsecond paragraph\nstill second\n\nthird paragraph\n"}} +{"Key":"y"} +{"Key":"a"} +{"Key":"p"} +{"Get":{"state":"ˇfirst paragraph\nstill first\n\nsecond paragraph\nstill second\n\nthird paragraph\n","mode":"Normal"}} +{"ReadRegister":{"name":"\"","value":"first paragraph\nstill first\n\n"}} +{"Key":"j"} +{"Key":"j"} +{"Key":"p"} +{"Get":{"state":"first paragraph\nstill first\n\nˇfirst paragraph\nstill first\n\nsecond paragraph\nstill second\n\nthird paragraph\n","mode":"Normal"}} diff --git a/crates/which_key/Cargo.toml b/crates/which_key/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..f53ba45dd71abc972ce23efb8871f485dfe47207 --- /dev/null +++ b/crates/which_key/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "which_key" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace 
= true + +[lib] +path = "src/which_key.rs" +doctest = false + +[dependencies] +command_palette.workspace = true +gpui.workspace = true +serde.workspace = true +settings.workspace = true +theme.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true diff --git a/crates/which_key/LICENSE-GPL b/crates/which_key/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/which_key/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/which_key/src/which_key.rs b/crates/which_key/src/which_key.rs new file mode 100644 index 0000000000000000000000000000000000000000..70889c100f33020a3ceaa8af1ba8812d5e7d4adb --- /dev/null +++ b/crates/which_key/src/which_key.rs @@ -0,0 +1,98 @@ +//! Which-key support for Zed. + +mod which_key_modal; +mod which_key_settings; + +use gpui::{App, Keystroke}; +use settings::Settings; +use std::{sync::LazyLock, time::Duration}; +use util::ResultExt; +use which_key_modal::WhichKeyModal; +use which_key_settings::WhichKeySettings; +use workspace::Workspace; + +pub fn init(cx: &mut App) { + WhichKeySettings::register(cx); + + cx.observe_new(|_: &mut Workspace, window, cx| { + let Some(window) = window else { + return; + }; + let mut timer = None; + cx.observe_pending_input(window, move |workspace, window, cx| { + if window.pending_input_keystrokes().is_none() { + if let Some(modal) = workspace.active_modal::(cx) { + modal.update(cx, |modal, cx| modal.dismiss(cx)); + }; + timer.take(); + return; + } + + let which_key_settings = WhichKeySettings::get_global(cx); + if !which_key_settings.enabled { + return; + } + + let delay_ms = which_key_settings.delay_ms; + + timer.replace(cx.spawn_in(window, async move |workspace_handle, cx| { + cx.background_executor() + .timer(Duration::from_millis(delay_ms)) + .await; + workspace_handle + .update_in(cx, |workspace, window, cx| { + if workspace.active_modal::(cx).is_some() { + return; + }; + + workspace.toggle_modal(window, cx, |window, cx| { + WhichKeyModal::new(workspace_handle.clone(), window, cx) + }); + }) + .log_err(); + })); + }) + .detach(); + }) + .detach(); +} + +// Hard-coded list of keystrokes to filter out from which-key display +pub static FILTERED_KEYSTROKES: LazyLock>> = LazyLock::new(|| { + [ + // Modifiers on normal vim commands + "g h", + "g j", + "g k", + "g l", + "g $", + "g ^", + // Duplicate keys with "ctrl" held, e.g. "ctrl-w ctrl-a" is duplicate of "ctrl-w a" + "ctrl-w ctrl-a", + "ctrl-w ctrl-c", + "ctrl-w ctrl-h", + "ctrl-w ctrl-j", + "ctrl-w ctrl-k", + "ctrl-w ctrl-l", + "ctrl-w ctrl-n", + "ctrl-w ctrl-o", + "ctrl-w ctrl-p", + "ctrl-w ctrl-q", + "ctrl-w ctrl-s", + "ctrl-w ctrl-v", + "ctrl-w ctrl-w", + "ctrl-w ctrl-]", + "ctrl-w ctrl-shift-w", + "ctrl-w ctrl-g t", + "ctrl-w ctrl-g shift-t", + ] + .iter() + .filter_map(|s| { + let keystrokes: Result, _> = s + .split(' ') + .map(|keystroke_str| Keystroke::parse(keystroke_str)) + .collect(); + keystrokes.ok() + }) + .collect() +}); diff --git a/crates/which_key/src/which_key_modal.rs b/crates/which_key/src/which_key_modal.rs new file mode 100644 index 0000000000000000000000000000000000000000..238431b90a8eafdd0e085a3f109e8f812fbe709b --- /dev/null +++ b/crates/which_key/src/which_key_modal.rs @@ -0,0 +1,308 @@ +//! Modal implementation for the which-key display. 
+ +use gpui::prelude::FluentBuilder; +use gpui::{ + App, Context, DismissEvent, EventEmitter, FocusHandle, Focusable, FontWeight, Keystroke, + ScrollHandle, Subscription, WeakEntity, Window, +}; +use settings::Settings; +use std::collections::HashMap; +use theme::ThemeSettings; +use ui::{ + Divider, DividerColor, DynamicSpacing, LabelSize, WithScrollbar, prelude::*, + text_for_keystrokes, +}; +use workspace::{ModalView, Workspace}; + +use crate::FILTERED_KEYSTROKES; + +pub struct WhichKeyModal { + _workspace: WeakEntity, + focus_handle: FocusHandle, + scroll_handle: ScrollHandle, + bindings: Vec<(SharedString, SharedString)>, + pending_keys: SharedString, + _pending_input_subscription: Subscription, + _focus_out_subscription: Subscription, +} + +impl WhichKeyModal { + pub fn new( + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + // Keep focus where it currently is + let focus_handle = window.focused(cx).unwrap_or(cx.focus_handle()); + + let handle = cx.weak_entity(); + let mut this = Self { + _workspace: workspace, + focus_handle: focus_handle.clone(), + scroll_handle: ScrollHandle::new(), + bindings: Vec::new(), + pending_keys: SharedString::new_static(""), + _pending_input_subscription: cx.observe_pending_input( + window, + |this: &mut Self, window, cx| { + this.update_pending_keys(window, cx); + }, + ), + _focus_out_subscription: window.on_focus_out(&focus_handle, cx, move |_, _, cx| { + handle.update(cx, |_, cx| cx.emit(DismissEvent)).ok(); + }), + }; + this.update_pending_keys(window, cx); + this + } + + pub fn dismiss(&self, cx: &mut Context) { + cx.emit(DismissEvent) + } + + fn update_pending_keys(&mut self, window: &mut Window, cx: &mut Context) { + let Some(pending_keys) = window.pending_input_keystrokes() else { + cx.emit(DismissEvent); + return; + }; + let bindings = window.possible_bindings_for_input(pending_keys); + + let mut binding_data = bindings + .iter() + .map(|binding| { + // Map to keystrokes + ( + binding + .keystrokes() + .iter() + .map(|k| k.inner().to_owned()) + .collect::>(), + binding.action(), + ) + }) + .filter(|(keystrokes, _action)| { + // Check if this binding matches any filtered keystroke pattern + !FILTERED_KEYSTROKES.iter().any(|filtered| { + keystrokes.len() >= filtered.len() + && keystrokes[..filtered.len()] == filtered[..] 
+ }) + }) + .map(|(keystrokes, action)| { + // Map to remaining keystrokes and action name + let remaining_keystrokes = keystrokes[pending_keys.len()..].to_vec(); + let action_name: SharedString = + command_palette::humanize_action_name(action.name()).into(); + (remaining_keystrokes, action_name) + }) + .collect(); + + binding_data = group_bindings(binding_data); + + // Sort bindings from shortest to longest, with groups last + // Using stable sort to preserve relative order of equal elements + binding_data.sort_by(|(keystrokes_a, action_a), (keystrokes_b, action_b)| { + // Groups (actions starting with "+") should go last + let is_group_a = action_a.starts_with('+'); + let is_group_b = action_b.starts_with('+'); + + // First, separate groups from non-groups + let group_cmp = is_group_a.cmp(&is_group_b); + if group_cmp != std::cmp::Ordering::Equal { + return group_cmp; + } + + // Then sort by keystroke count + let keystroke_cmp = keystrokes_a.len().cmp(&keystrokes_b.len()); + if keystroke_cmp != std::cmp::Ordering::Equal { + return keystroke_cmp; + } + + // Finally sort by text length, then lexicographically for full stability + let text_a = text_for_keystrokes(keystrokes_a, cx); + let text_b = text_for_keystrokes(keystrokes_b, cx); + let text_len_cmp = text_a.len().cmp(&text_b.len()); + if text_len_cmp != std::cmp::Ordering::Equal { + return text_len_cmp; + } + text_a.cmp(&text_b) + }); + binding_data.dedup(); + self.pending_keys = text_for_keystrokes(&pending_keys, cx).into(); + self.bindings = binding_data + .into_iter() + .map(|(keystrokes, action)| (text_for_keystrokes(&keystrokes, cx).into(), action)) + .collect(); + } +} + +impl Render for WhichKeyModal { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_rows = !self.bindings.is_empty(); + let viewport_size = window.viewport_size(); + + let max_panel_width = px((f32::from(viewport_size.width) * 0.5).min(480.0)); + let max_content_height = px(f32::from(viewport_size.height) * 0.4); + + // Push above status bar when visible + let status_height = self + ._workspace + .upgrade() + .and_then(|workspace| { + workspace.read_with(cx, |workspace, cx| { + if workspace.status_bar_visible(cx) { + Some( + DynamicSpacing::Base04.px(cx) * 2.0 + + ThemeSettings::get_global(cx).ui_font_size(cx), + ) + } else { + None + } + }) + }) + .unwrap_or(px(0.)); + + let margin_bottom = px(16.); + let bottom_offset = margin_bottom + status_height; + + // Title section + let title_section = { + let mut column = v_flex().gap(px(0.)).child( + div() + .child( + Label::new(self.pending_keys.clone()) + .size(LabelSize::Default) + .weight(FontWeight::MEDIUM) + .color(Color::Accent), + ) + .mb(px(2.)), + ); + + if has_rows { + column = column.child( + div() + .child(Divider::horizontal().color(DividerColor::BorderFaded)) + .mb(px(2.)), + ); + } + + column + }; + + let content = h_flex() + .items_start() + .id("which-key-content") + .gap(px(8.)) + .overflow_y_scroll() + .track_scroll(&self.scroll_handle) + .h_full() + .max_h(max_content_height) + .child( + // Keystrokes column + v_flex() + .gap(px(4.)) + .flex_shrink_0() + .children(self.bindings.iter().map(|(keystrokes, _)| { + div() + .child( + Label::new(keystrokes.clone()) + .size(LabelSize::Default) + .color(Color::Accent), + ) + .text_align(gpui::TextAlign::Right) + })), + ) + .child( + // Actions column + v_flex() + .gap(px(4.)) + .flex_1() + .min_w_0() + .children(self.bindings.iter().map(|(_, action_name)| { + let is_group = action_name.starts_with('+'); + let 
label_color = if is_group { + Color::Success + } else { + Color::Default + }; + + div().child( + Label::new(action_name.clone()) + .size(LabelSize::Default) + .color(label_color) + .single_line() + .truncate(), + ) + })), + ); + + div() + .id("which-key-buffer-panel-scroll") + .occlude() + .absolute() + .bottom(bottom_offset) + .right(px(16.)) + .min_w(px(220.)) + .max_w(max_panel_width) + .elevation_3(cx) + .px(px(12.)) + .child(v_flex().child(title_section).when(has_rows, |el| { + el.child( + div() + .max_h(max_content_height) + .child(content) + .vertical_scrollbar_for(&self.scroll_handle, window, cx), + ) + })) + } +} + +impl EventEmitter for WhichKeyModal {} + +impl Focusable for WhichKeyModal { + fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} + +impl ModalView for WhichKeyModal { + fn render_bare(&self) -> bool { + true + } +} + +fn group_bindings( + binding_data: Vec<(Vec, SharedString)>, +) -> Vec<(Vec, SharedString)> { + let mut groups: HashMap, Vec<(Vec, SharedString)>> = + HashMap::new(); + + // Group bindings by their first keystroke + for (remaining_keystrokes, action_name) in binding_data { + let first_key = remaining_keystrokes.first().cloned(); + groups + .entry(first_key) + .or_default() + .push((remaining_keystrokes, action_name)); + } + + let mut result = Vec::new(); + + for (first_key, mut group_bindings) in groups { + // Remove duplicates within each group + group_bindings.dedup_by_key(|(keystrokes, _)| keystrokes.clone()); + + if let Some(first_key) = first_key + && group_bindings.len() > 1 + { + // This is a group - create a single entry with just the first keystroke + let first_keystroke = vec![first_key]; + let count = group_bindings.len(); + result.push((first_keystroke, format!("+{} keybinds", count).into())); + } else { + // Not a group or empty keystrokes - add all bindings as-is + result.append(&mut group_bindings); + } + } + + result +} diff --git a/crates/which_key/src/which_key_settings.rs b/crates/which_key/src/which_key_settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..be19ab1521f4793305efca79b7026f79fd9064e2 --- /dev/null +++ b/crates/which_key/src/which_key_settings.rs @@ -0,0 +1,18 @@ +use settings::{RegisterSetting, Settings, SettingsContent, WhichKeySettingsContent}; + +#[derive(Debug, Clone, Copy, RegisterSetting)] +pub struct WhichKeySettings { + pub enabled: bool, + pub delay_ms: u64, +} + +impl Settings for WhichKeySettings { + fn from_settings(content: &SettingsContent) -> Self { + let which_key: &WhichKeySettingsContent = content.which_key.as_ref().unwrap(); + + Self { + enabled: which_key.enabled.unwrap(), + delay_ms: which_key.delay_ms.unwrap(), + } + } +} diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index d5d3016ab2704392c6cc9cc4bcebf6d50701d3be..956d63580404da351d34af3b5cf5fd531d5a0011 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -35,14 +35,17 @@ clock.workspace = true collections.workspace = true component.workspace = true db.workspace = true +feature_flags.workspace = true fs.workspace = true futures.workspace = true +git.workspace = true gpui.workspace = true http_client.workspace = true itertools.workspace = true language.workspace = true log.workspace = true menu.workspace = true +markdown.workspace = true node_runtime.workspace = true parking_lot.workspace = true postage.workspace = true diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 
05af5d080c4c965f3d53f61b5af144a456ce0074..7f4b09df0f94fa421c399ed9d70163f7cc2ba203 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -3,6 +3,7 @@ use crate::{DraggedDock, Event, ModalLayer, Pane}; use crate::{Workspace, status_bar::StatusItemView}; use anyhow::Context as _; use client::proto; + use gpui::{ Action, AnyView, App, Axis, Context, Corner, Entity, EntityId, EventEmitter, FocusHandle, Focusable, IntoElement, KeyContext, MouseButton, MouseDownEvent, MouseUpEvent, ParentElement, @@ -13,6 +14,7 @@ use settings::SettingsStore; use std::sync::Arc; use ui::{ContextMenu, Divider, DividerColor, IconButton, Tooltip, h_flex}; use ui::{prelude::*, right_click_menu}; +use util::ResultExt as _; pub(crate) const RESIZE_HANDLE_SIZE: Pixels = px(6.); @@ -25,6 +27,72 @@ pub enum PanelEvent { pub use proto::PanelId; +pub struct MinimizePane; +pub struct ClosePane; + +pub trait UtilityPane: EventEmitter + EventEmitter + Render { + fn position(&self, window: &Window, cx: &App) -> UtilityPanePosition; + /// The icon to render in the adjacent pane's tab bar for toggling this utility pane + fn toggle_icon(&self, cx: &App) -> IconName; + fn expanded(&self, cx: &App) -> bool; + fn set_expanded(&mut self, expanded: bool, cx: &mut Context); + fn width(&self, cx: &App) -> Pixels; + fn set_width(&mut self, width: Option, cx: &mut Context); +} + +pub trait UtilityPaneHandle: 'static + Send + Sync { + fn position(&self, window: &Window, cx: &App) -> UtilityPanePosition; + fn toggle_icon(&self, cx: &App) -> IconName; + fn expanded(&self, cx: &App) -> bool; + fn set_expanded(&self, expanded: bool, cx: &mut App); + fn width(&self, cx: &App) -> Pixels; + fn set_width(&self, width: Option, cx: &mut App); + fn to_any(&self) -> AnyView; + fn box_clone(&self) -> Box; +} + +impl UtilityPaneHandle for Entity +where + T: UtilityPane, +{ + fn position(&self, window: &Window, cx: &App) -> UtilityPanePosition { + self.read(cx).position(window, cx) + } + + fn toggle_icon(&self, cx: &App) -> IconName { + self.read(cx).toggle_icon(cx) + } + + fn expanded(&self, cx: &App) -> bool { + self.read(cx).expanded(cx) + } + + fn set_expanded(&self, expanded: bool, cx: &mut App) { + self.update(cx, |this, cx| this.set_expanded(expanded, cx)) + } + + fn width(&self, cx: &App) -> Pixels { + self.read(cx).width(cx) + } + + fn set_width(&self, width: Option, cx: &mut App) { + self.update(cx, |this, cx| this.set_width(width, cx)) + } + + fn to_any(&self) -> AnyView { + self.clone().into() + } + + fn box_clone(&self) -> Box { + Box::new(self.clone()) + } +} + +pub enum UtilityPanePosition { + Left, + Right, +} + pub trait Panel: Focusable + EventEmitter + Render + Sized { fn persistent_name() -> &'static str; fn panel_key() -> &'static str; @@ -281,7 +349,7 @@ impl Dock { let focus_subscription = cx.on_focus(&focus_handle, window, |dock: &mut Dock, window, cx| { if let Some(active_entry) = dock.active_panel_entry() { - active_entry.panel.panel_focus_handle(cx).focus(window) + active_entry.panel.panel_focus_handle(cx).focus(window, cx) } }); let zoom_subscription = cx.subscribe(&workspace, |dock, workspace, e: &Event, cx| { @@ -384,6 +452,13 @@ impl Dock { .position(|entry| entry.panel.remote_id() == Some(panel_id)) } + pub fn panel_for_id(&self, panel_id: EntityId) -> Option<&Arc> { + self.panel_entries + .iter() + .find(|entry| entry.panel.panel_id() == panel_id) + .map(|entry| &entry.panel) + } + pub fn first_enabled_panel_idx(&mut self, cx: &mut Context) -> anyhow::Result { self.panel_entries .iter() @@ -491,6 
+566,9 @@ impl Dock { new_dock.update(cx, |new_dock, cx| { new_dock.remove_panel(&panel, window, cx); + }); + + new_dock.update(cx, |new_dock, cx| { let index = new_dock.add_panel(panel.clone(), workspace.clone(), window, cx); if was_visible { @@ -498,6 +576,12 @@ impl Dock { new_dock.activate_panel(index, window, cx); } }); + + workspace + .update(cx, |workspace, cx| { + workspace.serialize_workspace(window, cx); + }) + .ok(); } }), cx.subscribe_in( @@ -508,7 +592,7 @@ impl Dock { this.set_panel_zoomed(&panel.to_any(), true, window, cx); if !PanelHandle::panel_focus_handle(panel, cx).contains_focused(window, cx) { - window.focus(&panel.focus_handle(cx)); + window.focus(&panel.focus_handle(cx), cx); } workspace .update(cx, |workspace, cx| { @@ -540,7 +624,7 @@ impl Dock { { this.set_open(true, window, cx); this.activate_panel(ix, window, cx); - window.focus(&panel.read(cx).focus_handle(cx)); + window.focus(&panel.read(cx).focus_handle(cx), cx); } } PanelEvent::Close => { @@ -560,7 +644,16 @@ impl Dock { .binary_search_by_key(&panel.read(cx).activation_priority(), |entry| { entry.panel.activation_priority(cx) }) { - Ok(ix) => ix, + Ok(ix) => { + if cfg!(debug_assertions) { + panic!( + "Panels `{}` and `{}` have the same activation priority. Each panel must have a unique priority so the status bar order is deterministic.", + T::panel_key(), + self.panel_entries[ix].panel.panel_key() + ); + } + ix + } Err(ix) => ix, }; if let Some(active_index) = self.active_panel_index.as_mut() @@ -577,6 +670,7 @@ impl Dock { ); self.restore_state(window, cx); + if panel.read(cx).starts_open(window, cx) { self.activate_panel(index, window, cx); self.set_open(true, window, cx); @@ -610,7 +704,7 @@ impl Dock { panel: &Entity, window: &mut Window, cx: &mut Context, - ) { + ) -> bool { if let Some(panel_ix) = self .panel_entries .iter() @@ -628,8 +722,13 @@ impl Dock { std::cmp::Ordering::Greater => {} } } + self.panel_entries.remove(panel_ix); cx.notify(); + + true + } else { + false } } @@ -882,7 +981,13 @@ impl Render for PanelButtons { .enumerate() .filter_map(|(i, entry)| { let icon = entry.panel.icon(window, cx)?; - let icon_tooltip = entry.panel.icon_tooltip(window, cx)?; + let icon_tooltip = entry + .panel + .icon_tooltip(window, cx) + .ok_or_else(|| { + anyhow::anyhow!("can't render a panel button without an icon tooltip") + }) + .log_err()?; let name = entry.panel.persistent_name(); let panel = entry.panel.clone(); @@ -943,7 +1048,7 @@ impl Render for PanelButtons { name = name, toggle_state = !is_open ); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); window.dispatch_action(action.boxed_clone(), cx) } }) @@ -994,19 +1099,21 @@ pub mod test { pub active: bool, pub focus_handle: FocusHandle, pub size: Pixels, + pub activation_priority: u32, } actions!(test_only, [ToggleTestPanel]); impl EventEmitter for TestPanel {} impl TestPanel { - pub fn new(position: DockPosition, cx: &mut App) -> Self { + pub fn new(position: DockPosition, activation_priority: u32, cx: &mut App) -> Self { Self { position, zoomed: false, active: false, focus_handle: cx.focus_handle(), size: px(300.), + activation_priority, } } } @@ -1072,7 +1179,7 @@ pub mod test { } fn activation_priority(&self) -> u32 { - 100 + self.activation_priority } } diff --git a/crates/workspace/src/invalid_item_view.rs b/crates/workspace/src/invalid_item_view.rs index eb6c8f3299838c1a01777885009fa67271b924d7..08242a1ed0c86bb465c85f79a2047b89f9dc86d2 100644 --- a/crates/workspace/src/invalid_item_view.rs +++ 
b/crates/workspace/src/invalid_item_view.rs @@ -11,6 +11,7 @@ use zed_actions::workspace::OpenWithSystem; use crate::Item; /// A view to display when a certain buffer/image/other item fails to open. +#[derive(Debug)] pub struct InvalidItemView { /// Which path was attempted to open. pub abs_path: Arc, diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index e68b5a99ee352bb1f5c0242f68e9ab894362497e..1570c125fa33135631d8181359ad34bb7802ec5f 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -11,9 +11,9 @@ use anyhow::Result; use client::{Client, proto}; use futures::{StreamExt, channel::mpsc}; use gpui::{ - Action, AnyElement, AnyView, App, AppContext, Context, Entity, EntityId, EventEmitter, - FocusHandle, Focusable, Font, HighlightStyle, Pixels, Point, Render, SharedString, Task, - WeakEntity, Window, + Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId, + EventEmitter, FocusHandle, Focusable, Font, HighlightStyle, Pixels, Point, Render, + SharedString, Task, WeakEntity, Window, }; use project::{Project, ProjectEntryId, ProjectPath}; pub use settings::{ @@ -64,8 +64,12 @@ pub struct ItemSettings { #[derive(RegisterSetting)] pub struct PreviewTabsSettings { pub enabled: bool, + pub enable_preview_from_project_panel: bool, pub enable_preview_from_file_finder: bool, - pub enable_preview_from_code_navigation: bool, + pub enable_preview_from_multibuffer: bool, + pub enable_preview_multibuffer_from_code_navigation: bool, + pub enable_preview_file_from_code_navigation: bool, + pub enable_keep_preview_on_code_navigation: bool, } impl Settings for ItemSettings { @@ -87,9 +91,19 @@ impl Settings for PreviewTabsSettings { let preview_tabs = content.preview_tabs.as_ref().unwrap(); Self { enabled: preview_tabs.enabled.unwrap(), + enable_preview_from_project_panel: preview_tabs + .enable_preview_from_project_panel + .unwrap(), enable_preview_from_file_finder: preview_tabs.enable_preview_from_file_finder.unwrap(), - enable_preview_from_code_navigation: preview_tabs - .enable_preview_from_code_navigation + enable_preview_from_multibuffer: preview_tabs.enable_preview_from_multibuffer.unwrap(), + enable_preview_multibuffer_from_code_navigation: preview_tabs + .enable_preview_multibuffer_from_code_navigation + .unwrap(), + enable_preview_file_from_code_navigation: preview_tabs + .enable_preview_file_from_code_navigation + .unwrap(), + enable_keep_preview_on_code_navigation: preview_tabs + .enable_keep_preview_on_code_navigation .unwrap(), } } @@ -279,7 +293,7 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if TypeId::of::() == type_id { Some(self_handle.clone().into()) } else { @@ -287,7 +301,7 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { None } @@ -454,7 +468,7 @@ pub trait ItemHandle: 'static + Send { fn workspace_deactivated(&self, window: &mut Window, cx: &mut App); fn navigate(&self, data: Box, window: &mut Window, cx: &mut App) -> bool; fn item_id(&self) -> EntityId; - fn to_any(&self) -> AnyView; + fn to_any_view(&self) -> AnyView; fn is_dirty(&self, cx: &App) -> bool; fn has_deleted_file(&self, cx: &App) -> bool; fn has_conflict(&self, cx: &App) -> bool; @@ -480,7 +494,7 @@ pub trait ItemHandle: 'static + Send { window: &mut Window, cx: &mut App, ) -> Task>; - fn act_as_type(&self, 
type_id: TypeId, cx: &App) -> Option; + fn act_as_type(&self, type_id: TypeId, cx: &App) -> Option; fn to_followable_item_handle(&self, cx: &App) -> Option>; fn to_serializable_item_handle(&self, cx: &App) -> Option>; fn on_release( @@ -513,7 +527,7 @@ pub trait WeakItemHandle: Send + Sync { impl dyn ItemHandle { pub fn downcast(&self) -> Option> { - self.to_any().downcast().ok() + self.to_any_view().downcast().ok() } pub fn act_as(&self, cx: &App) -> Option> { @@ -869,8 +883,18 @@ impl ItemHandle for Entity { if let Some(item) = weak_item.upgrade() && item.workspace_settings(cx).autosave == AutosaveSetting::OnFocusChange { - Pane::autosave_item(&item, workspace.project.clone(), window, cx) - .detach_and_log_err(cx); + // Only trigger autosave if focus has truly left the item. + // If focus is still within the item's hierarchy (e.g., moved to a context menu), + // don't trigger autosave to avoid unwanted formatting and cursor jumps. + // Also skip autosave if focus moved to a modal (e.g., command palette), + // since the user is still interacting with the workspace. + let focus_handle = item.item_focus_handle(cx); + if !focus_handle.contains_focused(window, cx) + && !workspace.has_active_modal(window, cx) + { + Pane::autosave_item(&item, workspace.project.clone(), window, cx) + .detach_and_log_err(cx); + } } }, ) @@ -911,7 +935,7 @@ impl ItemHandle for Entity { self.entity_id() } - fn to_any(&self) -> AnyView { + fn to_any_view(&self) -> AnyView { self.clone().into() } @@ -964,7 +988,7 @@ impl ItemHandle for Entity { self.update(cx, |item, cx| item.reload(project, window, cx)) } - fn act_as_type<'a>(&'a self, type_id: TypeId, cx: &'a App) -> Option { + fn act_as_type<'a>(&'a self, type_id: TypeId, cx: &'a App) -> Option { self.read(cx).act_as_type(type_id, self, cx) } @@ -981,7 +1005,7 @@ impl ItemHandle for Entity { } fn to_searchable_item_handle(&self, cx: &App) -> Option> { - self.read(cx).as_searchable(self) + self.read(cx).as_searchable(self, cx) } fn breadcrumb_location(&self, cx: &App) -> ToolbarItemLocation { @@ -1009,7 +1033,7 @@ impl ItemHandle for Entity { } fn to_serializable_item_handle(&self, cx: &App) -> Option> { - SerializableItemRegistry::view_to_serializable_item_handle(self.to_any(), cx) + SerializableItemRegistry::view_to_serializable_item_handle(self.to_any_view(), cx) } fn preserve_preview(&self, cx: &App) -> bool { @@ -1022,7 +1046,7 @@ impl ItemHandle for Entity { fn relay_action(&self, action: Box, window: &mut Window, cx: &mut App) { self.update(cx, |this, cx| { - this.focus_handle(cx).focus(window); + this.focus_handle(cx).focus(window, cx); window.dispatch_action(action, cx); }) } @@ -1030,13 +1054,13 @@ impl ItemHandle for Entity { impl From> for AnyView { fn from(val: Box) -> Self { - val.to_any() + val.to_any_view() } } impl From<&Box> for AnyView { fn from(val: &Box) -> Self { - val.to_any() + val.to_any_view() } } @@ -1247,7 +1271,7 @@ impl FollowableItemHandle for Entity { window: &mut Window, cx: &mut App, ) -> Option { - let existing = existing.to_any().downcast::().ok()?; + let existing = existing.to_any_view().downcast::().ok()?; self.read(cx).dedup(existing.read(cx), window, cx) } diff --git a/crates/workspace/src/modal_layer.rs b/crates/workspace/src/modal_layer.rs index bcd7db3a82aec46405927e118af86cf4a0d4912b..db4d85752835299117dba7fc2aeb1833383a390a 100644 --- a/crates/workspace/src/modal_layer.rs +++ b/crates/workspace/src/modal_layer.rs @@ -22,12 +22,17 @@ pub trait ModalView: ManagedView { fn fade_out_background(&self) -> bool { false } + + fn 
render_bare(&self) -> bool { + false + } } trait ModalViewHandle { fn on_before_dismiss(&mut self, window: &mut Window, cx: &mut App) -> DismissDecision; fn view(&self) -> AnyView; fn fade_out_background(&self, cx: &mut App) -> bool; + fn render_bare(&self, cx: &mut App) -> bool; } impl ModalViewHandle for Entity { @@ -42,6 +47,10 @@ impl ModalViewHandle for Entity { fn fade_out_background(&self, cx: &mut App) -> bool { self.read(cx).fade_out_background() } + + fn render_bare(&self, cx: &mut App) -> bool { + self.read(cx).render_bare() + } } pub struct ActiveModal { @@ -116,7 +125,7 @@ impl ModalLayer { focus_handle, }); cx.defer_in(window, move |_, window, cx| { - window.focus(&new_modal.focus_handle(cx)); + window.focus(&new_modal.focus_handle(cx), cx); }); cx.notify(); } @@ -144,7 +153,7 @@ impl ModalLayer { if let Some(previous_focus) = active_modal.previous_focus_handle && active_modal.focus_handle.contains_focused(window, cx) { - previous_focus.focus(window); + previous_focus.focus(window, cx); } cx.notify(); } @@ -167,32 +176,27 @@ impl ModalLayer { impl Render for ModalLayer { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let Some(active_modal) = &self.active_modal else { - return div(); + return div().into_any_element(); }; + if active_modal.modal.render_bare(cx) { + return active_modal.modal.view().into_any_element(); + } + div() - .occlude() .absolute() .size_full() - .top_0() - .left_0() - .when(active_modal.modal.fade_out_background(cx), |el| { + .inset_0() + .occlude() + .when(active_modal.modal.fade_out_background(cx), |this| { let mut background = cx.theme().colors().elevated_surface_background; background.fade_out(0.2); - el.bg(background) + this.bg(background) }) - .on_mouse_down( - MouseButton::Left, - cx.listener(|this, _, window, cx| { - this.hide_modal(window, cx); - }), - ) .child( v_flex() .h(px(0.0)) .top_20() - .flex() - .flex_col() .items_center() .track_focus(&active_modal.focus_handle) .child( @@ -204,5 +208,6 @@ impl Render for ModalLayer { }), ), ) + .into_any_element() } } diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 70be040df7c3718ba903565100b8548dcfc8b785..3b126d329e7fafefa4043661c5039f1e17b09b54 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -3,9 +3,12 @@ use anyhow::Context as _; use gpui::{ AnyView, App, AppContext as _, AsyncWindowContext, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, Render, ScrollHandle, - Task, svg, + Task, TextStyleRefinement, UnderlineStyle, svg, }; +use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use parking_lot::Mutex; +use settings::Settings; +use theme::ThemeSettings; use std::ops::Deref; use std::sync::{Arc, LazyLock}; @@ -41,7 +44,7 @@ pub enum NotificationId { impl NotificationId { /// Returns a unique [`NotificationId`] for the given type. 
- pub fn unique() -> Self { + pub const fn unique() -> Self { Self::Unique(TypeId::of::()) } @@ -216,6 +219,7 @@ pub struct LanguageServerPrompt { focus_handle: FocusHandle, request: Option, scroll_handle: ScrollHandle, + markdown: Entity, } impl Focusable for LanguageServerPrompt { @@ -228,10 +232,13 @@ impl Notification for LanguageServerPrompt {} impl LanguageServerPrompt { pub fn new(request: project::LanguageServerPromptRequest, cx: &mut App) -> Self { + let markdown = cx.new(|cx| Markdown::new(request.message.clone().into(), None, None, cx)); + Self { focus_handle: cx.focus_handle(), request: Some(request), scroll_handle: ScrollHandle::new(), + markdown, } } @@ -262,7 +269,7 @@ impl Render for LanguageServerPrompt { }; let (icon, color) = match request.level { - PromptLevel::Info => (IconName::Info, Color::Accent), + PromptLevel::Info => (IconName::Info, Color::Muted), PromptLevel::Warning => (IconName::Warning, Color::Warning), PromptLevel::Critical => (IconName::XCircle, Color::Error), }; @@ -291,16 +298,15 @@ impl Render for LanguageServerPrompt { .child( h_flex() .justify_between() - .items_start() .child( h_flex() .gap_2() - .child(Icon::new(icon).color(color)) + .child(Icon::new(icon).color(color).size(IconSize::Small)) .child(Label::new(request.lsp_name.clone())), ) .child( h_flex() - .gap_2() + .gap_1() .child( IconButton::new("copy", IconName::Copy) .on_click({ @@ -317,15 +323,17 @@ impl Render for LanguageServerPrompt { IconButton::new(close_id, close_icon) .tooltip(move |_window, cx| { if suppress { - Tooltip::for_action( - "Suppress.\nClose with click.", - &SuppressNotification, + Tooltip::with_meta( + "Suppress", + Some(&SuppressNotification), + "Click to close", cx, ) } else { - Tooltip::for_action( - "Close.\nSuppress with shift-click.", - &menu::Cancel, + Tooltip::with_meta( + "Close", + Some(&menu::Cancel), + "Suppress with shift-click", cx, ) } @@ -342,7 +350,16 @@ impl Render for LanguageServerPrompt { ), ), ) - .child(Label::new(request.message.to_string()).size(LabelSize::Small)) + .child( + MarkdownElement::new(self.markdown.clone(), markdown_style(window, cx)) + .text_size(TextSize::Small.rems(cx)) + .code_block_renderer(markdown::CodeBlockRenderer::Default { + copy_button: false, + copy_button_on_hover: false, + border: false, + }) + .on_url_click(|link, _, cx| cx.open_url(&link)), + ) .children(request.actions.iter().enumerate().map(|(ix, action)| { let this_handle = cx.entity(); Button::new(ix, action.title.clone()) @@ -369,6 +386,42 @@ fn workspace_error_notification_id() -> NotificationId { NotificationId::unique::() } +fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle { + let settings = ThemeSettings::get_global(cx); + let ui_font_family = settings.ui_font.family.clone(); + let ui_font_fallbacks = settings.ui_font.fallbacks.clone(); + let buffer_font_family = settings.buffer_font.family.clone(); + let buffer_font_fallbacks = settings.buffer_font.fallbacks.clone(); + + let mut base_text_style = window.text_style(); + base_text_style.refine(&TextStyleRefinement { + font_family: Some(ui_font_family), + font_fallbacks: ui_font_fallbacks, + color: Some(cx.theme().colors().text), + ..Default::default() + }); + + MarkdownStyle { + base_text_style, + selection_background_color: cx.theme().colors().element_selection_background, + inline_code: TextStyleRefinement { + background_color: Some(cx.theme().colors().editor_background.opacity(0.5)), + font_family: Some(buffer_font_family), + font_fallbacks: buffer_font_fallbacks, + ..Default::default() + }, + 
link: TextStyleRefinement { + underline: Some(UnderlineStyle { + thickness: px(1.), + color: Some(cx.theme().colors().text_accent), + wavy: false, + }), + ..Default::default() + }, + ..Default::default() + } +} + #[derive(Debug, Clone)] pub struct ErrorMessagePrompt { message: SharedString, @@ -593,9 +646,9 @@ pub mod simple_message_notification { use gpui::{ AnyElement, DismissEvent, EventEmitter, FocusHandle, Focusable, ParentElement, Render, - SharedString, Styled, + ScrollHandle, SharedString, Styled, }; - use ui::prelude::*; + use ui::{WithScrollbar, prelude::*}; use crate::notifications::NotificationFrame; @@ -617,6 +670,7 @@ pub mod simple_message_notification { show_close_button: bool, show_suppress_button: bool, title: Option, + scroll_handle: ScrollHandle, } impl Focusable for MessageNotification { @@ -661,6 +715,7 @@ pub mod simple_message_notification { show_suppress_button: true, title: None, focus_handle: cx.focus_handle(), + scroll_handle: ScrollHandle::new(), } } @@ -777,7 +832,18 @@ pub mod simple_message_notification { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { NotificationFrame::new() .with_title(self.title.clone()) - .with_content((self.build_content)(window, cx)) + .with_content( + div() + .child( + div() + .id("message-notification-content") + .max_h(vh(0.6, window)) + .overflow_y_scroll() + .track_scroll(&self.scroll_handle.clone()) + .child((self.build_content)(window, cx)), + ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx), + ) .show_close_button(self.show_close_button) .show_suppress_button(self.show_suppress_button) .on_close(cx.listener(|_, suppress, _, cx| { @@ -1071,9 +1137,9 @@ where window.spawn(cx, async move |cx| { let result = self.await; if let Err(err) = result.as_ref() { - log::error!("{err:?}"); + log::error!("{err:#}"); if let Ok(prompt) = cx.update(|window, cx| { - let mut display = format!("{err}"); + let mut display = format!("{err:#}"); if !display.ends_with('\n') { display.push('.'); display.push(' ') diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 24f4254232b33975d77f227a6fa2af57d49c25fd..f6256aee46b9e2b5c29c020e9ee12f6ff510210f 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1,7 +1,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, - WorkspaceItemBuilder, + WorkspaceItemBuilder, ZoomIn, ZoomOut, invalid_item_view::InvalidItemView, item::{ ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings, @@ -11,15 +11,17 @@ use crate::{ move_item, notifications::NotifyResultExt, toolbar::Toolbar, + utility_pane::UtilityPaneSlot, workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, }; use anyhow::Result; use collections::{BTreeSet, HashMap, HashSet, VecDeque}; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; use futures::{StreamExt, stream::FuturesUnordered}; use gpui::{ Action, AnyElement, App, AsyncWindowContext, ClickEvent, ClipboardItem, Context, Corner, Div, DragMoveEvent, Entity, EntityId, EventEmitter, ExternalPaths, FocusHandle, FocusOutEvent, - Focusable, IsZero, KeyContext, MouseButton, MouseDownEvent, NavigationDirection, Pixels, Point, + Focusable, KeyContext, MouseButton, MouseDownEvent, NavigationDirection, Pixels, Point, PromptLevel, Render, ScrollHandle, Subscription, Task, WeakEntity, WeakFocusHandle, Window, actions, anchored, 
deferred, prelude::*, }; @@ -45,10 +47,9 @@ use std::{ }; use theme::ThemeSettings; use ui::{ - ButtonSize, Color, ContextMenu, ContextMenuEntry, ContextMenuItem, DecoratedIcon, IconButton, - IconButtonShape, IconDecoration, IconDecorationKind, IconName, IconSize, Indicator, Label, - PopoverMenu, PopoverMenuHandle, Tab, TabBar, TabPosition, Tooltip, prelude::*, - right_click_menu, + ContextMenu, ContextMenuEntry, ContextMenuItem, DecoratedIcon, IconButtonShape, IconDecoration, + IconDecorationKind, Indicator, PopoverMenu, PopoverMenuHandle, Tab, TabBar, TabPosition, + Tooltip, prelude::*, right_click_menu, }; use util::{ResultExt, debug_panic, maybe, paths::PathStyle, truncate_and_remove_front}; @@ -396,6 +397,11 @@ pub struct Pane { diagnostic_summary_update: Task<()>, /// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here. pub project_item_restoration_data: HashMap>, + welcome_page: Option>, + + pub in_center_group: bool, + pub is_upper_left: bool, + pub is_upper_right: bool, } pub struct ActivationHistoryEntry { @@ -422,8 +428,9 @@ struct NavHistoryState { next_timestamp: Arc, } -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Default, Copy, Clone)] pub enum NavigationMode { + #[default] Normal, GoingBack, GoingForward, @@ -432,12 +439,6 @@ pub enum NavigationMode { Disabled, } -impl Default for NavigationMode { - fn default() -> Self { - Self::Normal - } -} - pub struct NavigationEntry { pub item: Arc, pub data: Option>, @@ -545,6 +546,10 @@ impl Pane { zoom_out_on_close: true, diagnostic_summary_update: Task::ready(()), project_item_restoration_data: HashMap::default(), + welcome_page: None, + in_center_group: false, + is_upper_left: false, + is_upper_right: false, } } @@ -620,17 +625,21 @@ impl Pane { self.last_focus_handle_by_item.get(&active_item.item_id()) && let Some(focus_handle) = weak_last_focus_handle.upgrade() { - focus_handle.focus(window); + focus_handle.focus(window, cx); return; } - active_item.item_focus_handle(cx).focus(window); + active_item.item_focus_handle(cx).focus(window, cx); } else if let Some(focused) = window.focused(cx) && !self.context_menu_focused(window, cx) { self.last_focus_handle_by_item .insert(active_item.item_id(), focused.downgrade()); } + } else if let Some(welcome_page) = self.welcome_page.as_ref() { + if self.focus_handle.is_focused(window) { + welcome_page.read(cx).focus_handle(cx).focus(window, cx); + } } } @@ -878,10 +887,35 @@ impl Pane { self.preview_item_id == Some(item_id) } + /// Promotes the item with the given ID to not be a preview item. + /// This does nothing if it wasn't already a preview item. + pub fn unpreview_item_if_preview(&mut self, item_id: EntityId) { + if self.is_active_preview_item(item_id) { + self.preview_item_id = None; + } + } + /// Marks the item with the given ID as the preview item. /// This will be ignored if the global setting `preview_tabs` is disabled. - pub fn set_preview_item_id(&mut self, item_id: Option, cx: &App) { - if PreviewTabsSettings::get_global(cx).enabled { + /// + /// The old preview item (if there was one) is closed and its index is returned. + pub fn replace_preview_item_id( + &mut self, + item_id: EntityId, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let idx = self.close_current_preview_item(window, cx); + self.set_preview_item_id(Some(item_id), cx); + idx + } + + /// Marks the item with the given ID as the preview item. + /// This will be ignored if the global setting `preview_tabs` is disabled. 
+ /// + /// This is a low-level method. Prefer `unpreview_item_if_preview()` or `set_new_preview_item()`. + pub(crate) fn set_preview_item_id(&mut self, item_id: Option, cx: &App) { + if item_id.is_none() || PreviewTabsSettings::get_global(cx).enabled { self.preview_item_id = item_id; } } @@ -900,7 +934,7 @@ impl Pane { && preview_item.item_id() == item_id && !preview_item.preserve_preview(cx) { - self.set_preview_item_id(None, cx); + self.unpreview_item_if_preview(item_id); } } @@ -941,14 +975,8 @@ impl Pane { let set_up_existing_item = |index: usize, pane: &mut Self, window: &mut Window, cx: &mut Context| { - // If the item is already open, and the item is a preview item - // and we are not allowing items to open as preview, mark the item as persistent. - if let Some(preview_item_id) = pane.preview_item_id - && let Some(tab) = pane.items.get(index) - && tab.item_id() == preview_item_id - && !allow_preview - { - pane.set_preview_item_id(None, cx); + if !allow_preview && let Some(item) = pane.items.get(index) { + pane.unpreview_item_if_preview(item.item_id()); } if activate { pane.activate_item(index, focus_item, focus_item, window, cx); @@ -960,7 +988,7 @@ impl Pane { window: &mut Window, cx: &mut Context| { if allow_preview { - pane.set_preview_item_id(Some(new_item.item_id()), cx); + pane.replace_preview_item_id(new_item.item_id(), window, cx); } if let Some(text) = new_item.telemetry_event_text(cx) { @@ -1041,6 +1069,7 @@ impl Pane { ) -> Option { let item_idx = self.preview_item_idx()?; let id = self.preview_item_id()?; + self.set_preview_item_id(None, cx); let prev_active_item_index = self.active_item_index; self.remove_item(id, false, false, window, cx); @@ -1208,7 +1237,7 @@ impl Pane { pub fn items_of_type(&self) -> impl '_ + Iterator> { self.items .iter() - .filter_map(|item| item.to_any().downcast().ok()) + .filter_map(|item| item.to_any_view().downcast().ok()) } pub fn active_item(&self) -> Option> { @@ -1282,6 +1311,25 @@ impl Pane { } } + pub fn zoom_in(&mut self, _: &ZoomIn, window: &mut Window, cx: &mut Context) { + if !self.can_toggle_zoom { + cx.propagate(); + } else if !self.zoomed && !self.items.is_empty() { + if !self.focus_handle.contains_focused(window, cx) { + cx.focus_self(window); + } + cx.emit(Event::ZoomIn); + } + } + + pub fn zoom_out(&mut self, _: &ZoomOut, _window: &mut Window, cx: &mut Context) { + if !self.can_toggle_zoom { + cx.propagate(); + } else if self.zoomed { + cx.emit(Event::ZoomOut); + } + } + pub fn activate_item( &mut self, index: usize, @@ -1951,7 +1999,7 @@ impl Pane { let should_activate = activate_pane || self.has_focus(window, cx); if self.items.len() == 1 && should_activate { - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } else { self.activate_item( index_to_activate, @@ -1986,9 +2034,7 @@ impl Pane { item.on_removed(cx); self.nav_history.set_mode(mode); - if self.is_active_preview_item(item.item_id()) { - self.set_preview_item_id(None, cx); - } + self.unpreview_item_if_preview(item.item_id()); if let Some(path) = item.project_path(cx) { let abs_path = self @@ -2199,9 +2245,7 @@ impl Pane { if can_save { pane.update_in(cx, |pane, window, cx| { - if pane.is_active_preview_item(item.item_id()) { - pane.set_preview_item_id(None, cx); - } + pane.unpreview_item_if_preview(item.item_id()); item.save( SaveOptions { format: should_format, @@ -2306,7 +2350,7 @@ impl Pane { pub fn focus_active_item(&mut self, window: &mut Window, cx: &mut Context) { if let Some(active_item) = self.active_item() { let focus_handle = 
active_item.item_focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); } } @@ -2455,8 +2499,8 @@ impl Pane { let id = self.item_for_index(ix)?.item_id(); let should_activate = ix == self.active_item_index; - if matches!(operation, PinOperation::Pin) && self.is_active_preview_item(id) { - self.set_preview_item_id(None, cx); + if matches!(operation, PinOperation::Pin) { + self.unpreview_item_if_preview(id); } match operation { @@ -2596,6 +2640,7 @@ impl Pane { let close_side = &settings.close_position; let show_close_button = &settings.show_close_button; let indicator = render_item_indicator(item.boxed_clone(), cx); + let tab_tooltip_content = item.tab_tooltip_content(cx); let item_id = item.item_id(); let is_first_item = ix == 0; let is_last_item = ix == self.items.len() - 1; @@ -2628,12 +2673,9 @@ impl Pane { ) .on_mouse_down( MouseButton::Left, - cx.listener(move |pane, event: &MouseDownEvent, _, cx| { - if let Some(id) = pane.preview_item_id - && id == item_id - && event.click_count > 1 - { - pane.set_preview_item_id(None, cx); + cx.listener(move |pane, event: &MouseDownEvent, _, _| { + if event.click_count > 1 { + pane.unpreview_item_if_preview(item_id); } }), ) @@ -2683,12 +2725,6 @@ impl Pane { this.drag_split_direction = None; this.handle_external_paths_drop(paths, window, cx) })) - .when_some(item.tab_tooltip_content(cx), |tab, content| match content { - TabTooltipContent::Text(text) => tab.tooltip(Tooltip::text(text)), - TabTooltipContent::Custom(element_fn) => { - tab.tooltip(move |window, cx| element_fn(window, cx)) - } - }) .start_slot::(indicator) .map(|this| { let end_slot_action: &'static dyn Action; @@ -2729,11 +2765,11 @@ impl Pane { .map(|this| { if is_active { let focus_handle = focus_handle.clone(); - this.tooltip(move |_window, cx| { + this.tooltip(move |window, cx| { Tooltip::for_action_in( end_slot_tooltip_text, end_slot_action, - &focus_handle, + &window.focused(cx).unwrap_or_else(|| focus_handle.clone()), cx, ) }) @@ -2755,7 +2791,15 @@ impl Pane { }) .flatten(), ) - .child(label), + .child(label) + .id(("pane-tab-content", ix)) + .map(|this| match tab_tooltip_content { + Some(TabTooltipContent::Text(text)) => this.tooltip(Tooltip::text(text)), + Some(TabTooltipContent::Custom(element_fn)) => { + this.tooltip(move |window, cx| element_fn(window, cx)) + } + None => this, + }), ); let single_entry_to_resolve = (self.items[ix].buffer_kind(cx) == ItemBufferKind::Singleton) @@ -3022,7 +3066,13 @@ impl Pane { } fn render_tab_bar(&mut self, window: &mut Window, cx: &mut Context) -> AnyElement { + let Some(workspace) = self.workspace.upgrade() else { + return gpui::Empty.into_any(); + }; + let focus_handle = self.focus_handle.clone(); + let is_pane_focused = self.has_focus(window, cx); + let navigate_backward = IconButton::new("navigate_backward", IconName::ArrowLeft) .icon_size(IconSize::Small) .on_click({ @@ -3036,9 +3086,80 @@ impl Pane { .disabled(!self.can_navigate_backward()) .tooltip({ let focus_handle = focus_handle.clone(); - move |_window, cx| Tooltip::for_action_in("Go Back", &GoBack, &focus_handle, cx) + move |window, cx| { + Tooltip::for_action_in( + "Go Back", + &GoBack, + &window.focused(cx).unwrap_or_else(|| focus_handle.clone()), + cx, + ) + } }); + let open_aside_left = { + let workspace = workspace.read(cx); + workspace.utility_pane(UtilityPaneSlot::Left).map(|pane| { + let toggle_icon = pane.toggle_icon(cx); + let workspace_handle = self.workspace.clone(); + + h_flex() + .h_full() + .pr_1p5() + .border_r_1() + 
.border_color(cx.theme().colors().border) + .child( + IconButton::new("open_aside_left", toggle_icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Toggle Agent Pane")) // TODO: Probably want to make this generic + .on_click(move |_, window, cx| { + workspace_handle + .update(cx, |workspace, cx| { + workspace.toggle_utility_pane( + UtilityPaneSlot::Left, + window, + cx, + ) + }) + .ok(); + }), + ) + .into_any_element() + }) + }; + + let open_aside_right = { + let workspace = workspace.read(cx); + workspace.utility_pane(UtilityPaneSlot::Right).map(|pane| { + let toggle_icon = pane.toggle_icon(cx); + let workspace_handle = self.workspace.clone(); + + h_flex() + .h_full() + .when(is_pane_focused, |this| { + this.pl(DynamicSpacing::Base04.rems(cx)) + .border_l_1() + .border_color(cx.theme().colors().border) + }) + .child( + IconButton::new("open_aside_right", toggle_icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Toggle Agent Pane")) // TODO: Probably want to make this generic + .on_click(move |_, window, cx| { + workspace_handle + .update(cx, |workspace, cx| { + workspace.toggle_utility_pane( + UtilityPaneSlot::Right, + window, + cx, + ) + }) + .ok(); + }), + ) + .into_any_element() + }) + }; + let navigate_forward = IconButton::new("navigate_forward", IconName::ArrowRight) .icon_size(IconSize::Small) .on_click({ @@ -3052,8 +3173,13 @@ impl Pane { .disabled(!self.can_navigate_forward()) .tooltip({ let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in("Go Forward", &GoForward, &focus_handle, cx) + move |window, cx| { + Tooltip::for_action_in( + "Go Forward", + &GoForward, + &window.focused(cx).unwrap_or_else(|| focus_handle.clone()), + cx, + ) } }); @@ -3079,7 +3205,45 @@ impl Pane { } let unpinned_tabs = tab_items.split_off(self.pinned_tab_count); let pinned_tabs = tab_items; + + let render_aside_toggle_left = cx.has_flag::() + && self + .is_upper_left + .then(|| { + self.workspace.upgrade().and_then(|entity| { + let workspace = entity.read(cx); + workspace + .utility_pane(UtilityPaneSlot::Left) + .map(|pane| !pane.expanded(cx)) + }) + }) + .flatten() + .unwrap_or(false); + + let render_aside_toggle_right = cx.has_flag::() + && self + .is_upper_right + .then(|| { + self.workspace.upgrade().and_then(|entity| { + let workspace = entity.read(cx); + workspace + .utility_pane(UtilityPaneSlot::Right) + .map(|pane| !pane.expanded(cx)) + }) + }) + .flatten() + .unwrap_or(false); + TabBar::new("tab_bar") + .map(|tab_bar| { + if let Some(open_aside_left) = open_aside_left + && render_aside_toggle_left + { + tab_bar.start_child(open_aside_left) + } else { + tab_bar + } + }) .when( self.display_nav_history_buttons.unwrap_or_default(), |tab_bar| { @@ -3102,8 +3266,10 @@ impl Pane { .children(pinned_tabs.len().ne(&0).then(|| { let max_scroll = self.tab_bar_scroll_handle.max_offset().width; // We need to check both because offset returns delta values even when the scroll handle is not scrollable - let is_scrollable = !max_scroll.is_zero(); let is_scrolled = self.tab_bar_scroll_handle.offset().x < px(0.); + // Avoid flickering when max_offset is very small (< 2px). + // The border adds 1-2px which can push max_offset back to 0, creating a loop. 
+ let is_scrollable = max_scroll > px(2.0); let has_active_unpinned_tab = self.active_item_index >= self.pinned_tab_count; h_flex() .children(pinned_tabs) @@ -3170,6 +3336,15 @@ impl Pane { })), ), ) + .map(|tab_bar| { + if let Some(open_aside_right) = open_aside_right + && render_aside_toggle_right + { + tab_bar.end_child(open_aside_right) + } else { + tab_bar + } + }) .into_any_element() } @@ -3259,11 +3434,7 @@ impl Pane { let mut to_pane = cx.entity(); let split_direction = self.drag_split_direction; let item_id = dragged_tab.item.item_id(); - if let Some(preview_item_id) = self.preview_item_id - && item_id == preview_item_id - { - self.set_preview_item_id(None, cx); - } + self.unpreview_item_if_preview(item_id); let is_clone = cfg!(target_os = "macos") && window.modifiers().alt || cfg!(not(target_os = "macos")) && window.modifiers().control; @@ -3753,6 +3924,8 @@ impl Render for Pane { cx.emit(Event::JoinAll); })) .on_action(cx.listener(Pane::toggle_zoom)) + .on_action(cx.listener(Pane::zoom_in)) + .on_action(cx.listener(Pane::zoom_out)) .on_action(cx.listener(Self::navigate_backward)) .on_action(cx.listener(Self::navigate_forward)) .on_action( @@ -3775,15 +3948,17 @@ impl Render for Pane { .on_action(cx.listener(Self::toggle_pin_tab)) .on_action(cx.listener(Self::unpin_all_tabs)) .when(PreviewTabsSettings::get_global(cx).enabled, |this| { - this.on_action(cx.listener(|pane: &mut Pane, _: &TogglePreviewTab, _, cx| { - if let Some(active_item_id) = pane.active_item().map(|i| i.item_id()) { - if pane.is_active_preview_item(active_item_id) { - pane.set_preview_item_id(None, cx); - } else { - pane.set_preview_item_id(Some(active_item_id), cx); + this.on_action( + cx.listener(|pane: &mut Pane, _: &TogglePreviewTab, window, cx| { + if let Some(active_item_id) = pane.active_item().map(|i| i.item_id()) { + if pane.is_active_preview_item(active_item_id) { + pane.unpreview_item_if_preview(active_item_id); + } else { + pane.replace_preview_item_id(active_item_id, window, cx); + } } - } - })) + }), + ) }) .on_action( cx.listener(|pane: &mut Self, action: &CloseActiveItem, window, cx| { @@ -3871,7 +4046,7 @@ impl Render for Pane { .size_full() .overflow_hidden() .child(self.toolbar.clone()) - .child(item.to_any()) + .child(item.to_any_view()) } else { let placeholder = div .id("pane_placeholder") @@ -3891,10 +4066,15 @@ impl Render for Pane { if has_worktrees { placeholder } else { - placeholder.child( - Label::new("Open a file or project to get started.") - .color(Color::Muted), - ) + if self.welcome_page.is_none() { + let workspace = self.workspace.clone(); + self.welcome_page = Some(cx.new(|cx| { + crate::welcome::WelcomePage::new( + workspace, true, window, cx, + ) + })); + } + placeholder.child(self.welcome_page.clone().unwrap()) } } }) @@ -4041,6 +4221,25 @@ impl NavHistory { self.0.lock().mode = NavigationMode::Normal; } + pub fn clear(&mut self, cx: &mut App) { + let mut state = self.0.lock(); + + if state.backward_stack.is_empty() + && state.forward_stack.is_empty() + && state.closed_stack.is_empty() + && state.paths_by_item.is_empty() + { + return; + } + + state.mode = NavigationMode::Normal; + state.backward_stack.clear(); + state.forward_stack.clear(); + state.closed_stack.clear(); + state.paths_by_item.clear(); + state.did_update(cx); + } + pub fn pop(&mut self, mode: NavigationMode, cx: &mut App) -> Option { let mut state = self.0.lock(); let entry = match mode { @@ -4102,6 +4301,7 @@ impl NavHistory { is_preview, }); } + NavigationMode::ClosingItem if is_preview => return, 
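// Illustrative sketch (not part of the diff): the effect of the new
// `NavigationMode::ClosingItem if is_preview => return` guard above.
// Closing a preview tab no longer records an entry on the "recently closed"
// stack, while regular tabs are still pushed and the stack stays bounded.
// Names and the cap value are simplified stand-ins for NavHistoryState.
use std::collections::VecDeque;

// Placeholder cap; the real MAX_NAVIGATION_HISTORY_LEN is defined elsewhere in pane.rs.
const MAX_NAVIGATION_HISTORY_LEN: usize = 1024;

#[derive(Default)]
struct ClosedStack(VecDeque<&'static str>);

impl ClosedStack {
    fn on_item_closed(&mut self, item: &'static str, is_preview: bool) {
        // Preview tabs are transient, so they are not offered for "reopen closed item".
        if is_preview {
            return;
        }
        if self.0.len() >= MAX_NAVIGATION_HISTORY_LEN {
            self.0.pop_front();
        }
        self.0.push_back(item);
    }
}

fn main() {
    let mut closed = ClosedStack::default();
    closed.on_item_closed("main.rs", false);
    closed.on_item_closed("README.md", true); // preview: skipped
    assert_eq!(closed.0.len(), 1);
}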
NavigationMode::ClosingItem => { if state.closed_stack.len() >= MAX_NAVIGATION_HISTORY_LEN { state.closed_stack.pop_front(); @@ -6615,13 +6815,13 @@ mod tests { let tab_bar_scroll_handle = pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); assert_eq!(tab_bar_scroll_handle.children_count(), 6); - let tab_bounds = cx.debug_bounds("TAB-3").unwrap(); + let tab_bounds = cx.debug_bounds("TAB-4").unwrap(); let new_tab_button_bounds = cx.debug_bounds("ICON-Plus").unwrap(); let scroll_bounds = tab_bar_scroll_handle.bounds(); let scroll_offset = tab_bar_scroll_handle.offset(); - assert!(tab_bounds.right() <= scroll_bounds.right() + scroll_offset.x); - // -39.5 is the magic number for this setup - assert_eq!(scroll_offset.x, px(-39.5)); + assert!(tab_bounds.right() <= scroll_bounds.right()); + // -43.0 is the magic number for this setup + assert_eq!(scroll_offset.x, px(-43.0)); assert!( !tab_bounds.intersects(&new_tab_button_bounds), "Tab should not overlap with the new tab button, if this is failing check if there's been a redesign!" @@ -6939,7 +7139,7 @@ mod tests { .enumerate() .map(|(ix, item)| { let mut state = item - .to_any() + .to_any_view() .downcast::() .unwrap() .read(cx) diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 36898b127bdd749a9c1867a97bd72dfd6f4e15ea..393ed74e30c9c34bf7cdb22aabf2de2d05aa84f8 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -28,6 +28,7 @@ const VERTICAL_MIN_SIZE: f32 = 100.; #[derive(Clone)] pub struct PaneGroup { pub root: Member, + pub is_center: bool, } pub struct PaneRenderResult { @@ -37,22 +38,31 @@ pub struct PaneRenderResult { impl PaneGroup { pub fn with_root(root: Member) -> Self { - Self { root } + Self { + root, + is_center: false, + } } pub fn new(pane: Entity) -> Self { Self { root: Member::Pane(pane), + is_center: false, } } + pub fn set_is_center(&mut self, is_center: bool) { + self.is_center = is_center; + } + pub fn split( &mut self, old_pane: &Entity, new_pane: &Entity, direction: SplitDirection, + cx: &mut App, ) -> Result<()> { - match &mut self.root { + let result = match &mut self.root { Member::Pane(pane) => { if pane == old_pane { self.root = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); @@ -62,7 +72,11 @@ impl PaneGroup { } } Member::Axis(axis) => axis.split(old_pane, new_pane, direction), + }; + if result.is_ok() { + self.mark_positions(cx); } + result } pub fn bounding_box_for_pane(&self, pane: &Entity) -> Option> { @@ -90,6 +104,7 @@ impl PaneGroup { &mut self, active_pane: &Entity, direction: SplitDirection, + cx: &mut App, ) -> Result { if let Some(pane) = self.find_pane_at_border(direction) && pane == active_pane @@ -97,7 +112,7 @@ impl PaneGroup { return Ok(false); } - if !self.remove(active_pane)? { + if !self.remove_internal(active_pane)? 
{ return Ok(false); } @@ -110,6 +125,7 @@ impl PaneGroup { 0 }; root.insert_pane(idx, active_pane); + self.mark_positions(cx); return Ok(true); } @@ -119,6 +135,7 @@ impl PaneGroup { vec![Member::Pane(active_pane.clone()), self.root.clone()] }; self.root = Member::Axis(PaneAxis::new(direction.axis(), members)); + self.mark_positions(cx); Ok(true) } @@ -133,7 +150,15 @@ impl PaneGroup { /// - Ok(true) if it found and removed a pane /// - Ok(false) if it found but did not remove the pane /// - Err(_) if it did not find the pane - pub fn remove(&mut self, pane: &Entity) -> Result { + pub fn remove(&mut self, pane: &Entity, cx: &mut App) -> Result { + let result = self.remove_internal(pane); + if let Ok(true) = result { + self.mark_positions(cx); + } + result + } + + fn remove_internal(&mut self, pane: &Entity) -> Result { match &mut self.root { Member::Pane(_) => Ok(false), Member::Axis(axis) => { @@ -151,6 +176,7 @@ impl PaneGroup { direction: Axis, amount: Pixels, bounds: &Bounds, + cx: &mut App, ) { match &mut self.root { Member::Pane(_) => {} @@ -158,22 +184,29 @@ impl PaneGroup { let _ = axis.resize(pane, direction, amount, bounds); } }; + self.mark_positions(cx); } - pub fn reset_pane_sizes(&mut self) { + pub fn reset_pane_sizes(&mut self, cx: &mut App) { match &mut self.root { Member::Pane(_) => {} Member::Axis(axis) => { let _ = axis.reset_pane_sizes(); } }; + self.mark_positions(cx); } - pub fn swap(&mut self, from: &Entity, to: &Entity) { + pub fn swap(&mut self, from: &Entity, to: &Entity, cx: &mut App) { match &mut self.root { Member::Pane(_) => {} Member::Axis(axis) => axis.swap(from, to), }; + self.mark_positions(cx); + } + + pub fn mark_positions(&mut self, cx: &mut App) { + self.root.mark_positions(self.is_center, true, true, cx); } pub fn render( @@ -232,8 +265,9 @@ impl PaneGroup { self.pane_at_pixel_position(target) } - pub fn invert_axies(&mut self) { + pub fn invert_axies(&mut self, cx: &mut App) { self.root.invert_pane_axies(); + self.mark_positions(cx); } } @@ -243,6 +277,43 @@ pub enum Member { Pane(Entity), } +impl Member { + pub fn mark_positions( + &mut self, + in_center_group: bool, + is_upper_left: bool, + is_upper_right: bool, + cx: &mut App, + ) { + match self { + Member::Axis(pane_axis) => { + let len = pane_axis.members.len(); + for (idx, member) in pane_axis.members.iter_mut().enumerate() { + let member_upper_left = match pane_axis.axis { + Axis::Vertical => is_upper_left && idx == 0, + Axis::Horizontal => is_upper_left && idx == 0, + }; + let member_upper_right = match pane_axis.axis { + Axis::Vertical => is_upper_right && idx == 0, + Axis::Horizontal => is_upper_right && idx == len - 1, + }; + member.mark_positions( + in_center_group, + member_upper_left, + member_upper_right, + cx, + ); + } + } + Member::Pane(entity) => entity.update(cx, |pane, _| { + pane.in_center_group = in_center_group; + pane.is_upper_left = is_upper_left; + pane.is_upper_right = is_upper_right; + }), + } + } +} + #[derive(Clone, Copy)] pub struct PaneRenderContext<'a> { pub project: &'a Entity, @@ -963,6 +1034,15 @@ impl SplitDirection { Self::Down | Self::Right => true, } } + + pub fn opposite(&self) -> SplitDirection { + match self { + Self::Down => Self::Up, + Self::Up => Self::Down, + Self::Left => Self::Right, + Self::Right => Self::Left, + } + } } mod element { diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 3d7ddf5d2ceae40f19e4684b63f6b33c8b53b280..cf5bdf2ab0059f10f2fb44e2069c8c0baf24d72b 100644 --- 
a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -9,18 +9,26 @@ use std::{ }; use anyhow::{Context as _, Result, bail}; -use collections::{HashMap, IndexSet}; +use collections::{HashMap, HashSet, IndexSet}; use db::{ + kvp::KEY_VALUE_STORE, query, sqlez::{connection::Connection, domain::Domain}, sqlez_macros::sql, }; -use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size}; -use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}; +use gpui::{Axis, Bounds, Entity, Task, WindowBounds, WindowId, point, size}; +use project::{ + debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}, + trusted_worktrees::{PathTrust, RemoteHostLocation, find_worktree_in_store}, + worktree_store::WorktreeStore, +}; use language::{LanguageName, Toolchain, ToolchainScope}; use project::WorktreeId; -use remote::{RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions}; +use remote::{ + DockerConnectionOptions, RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions, +}; +use serde::{Deserialize, Serialize}; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -44,6 +52,11 @@ use model::{ use self::model::{DockStructure, SerializedWorkspaceLocation}; +// https://www.sqlite.org/limits.html +// > <..> the maximum value of a host parameter number is SQLITE_MAX_VARIABLE_NUMBER, +// > which defaults to <..> 32766 for SQLite versions after 3.32.0. +const MAX_QUERY_PLACEHOLDERS: usize = 32000; + #[derive(Copy, Clone, Debug, PartialEq)] pub(crate) struct SerializedAxis(pub(crate) gpui::Axis); impl sqlez::bindable::StaticColumnCount for SerializedAxis {} @@ -152,6 +165,124 @@ impl Column for SerializedWindowBounds { } } +const DEFAULT_WINDOW_BOUNDS_KEY: &str = "default_window_bounds"; + +pub fn read_default_window_bounds() -> Option<(Uuid, WindowBounds)> { + let json_str = KEY_VALUE_STORE + .read_kvp(DEFAULT_WINDOW_BOUNDS_KEY) + .log_err() + .flatten()?; + + let (display_uuid, persisted) = + serde_json::from_str::<(Uuid, WindowBoundsJson)>(&json_str).ok()?; + Some((display_uuid, persisted.into())) +} + +pub async fn write_default_window_bounds( + bounds: WindowBounds, + display_uuid: Uuid, +) -> anyhow::Result<()> { + let persisted = WindowBoundsJson::from(bounds); + let json_str = serde_json::to_string(&(display_uuid, persisted))?; + KEY_VALUE_STORE + .write_kvp(DEFAULT_WINDOW_BOUNDS_KEY.to_string(), json_str) + .await?; + Ok(()) +} + +#[derive(Serialize, Deserialize)] +pub enum WindowBoundsJson { + Windowed { + x: i32, + y: i32, + width: i32, + height: i32, + }, + Maximized { + x: i32, + y: i32, + width: i32, + height: i32, + }, + Fullscreen { + x: i32, + y: i32, + width: i32, + height: i32, + }, +} + +impl From for WindowBoundsJson { + fn from(b: WindowBounds) -> Self { + match b { + WindowBounds::Windowed(bounds) => { + let origin = bounds.origin; + let size = bounds.size; + WindowBoundsJson::Windowed { + x: f32::from(origin.x).round() as i32, + y: f32::from(origin.y).round() as i32, + width: f32::from(size.width).round() as i32, + height: f32::from(size.height).round() as i32, + } + } + WindowBounds::Maximized(bounds) => { + let origin = bounds.origin; + let size = bounds.size; + WindowBoundsJson::Maximized { + x: f32::from(origin.x).round() as i32, + y: f32::from(origin.y).round() as i32, + width: f32::from(size.width).round() as i32, + height: f32::from(size.height).round() as i32, + } + } + WindowBounds::Fullscreen(bounds) => { + let origin = bounds.origin; + let size = bounds.size; + 
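// Illustrative sketch (not part of the diff): the round trip that
// read_default_window_bounds/write_default_window_bounds rely on. The bounds
// enum is flattened to plain integers, serialized with serde_json, and stored
// under a single key-value entry (paired with a display Uuid in the real
// code). This standalone version assumes only the serde and serde_json crates.
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum WindowBoundsJson {
    Windowed { x: i32, y: i32, width: i32, height: i32 },
    Maximized { x: i32, y: i32, width: i32, height: i32 },
    Fullscreen { x: i32, y: i32, width: i32, height: i32 },
}

fn main() -> serde_json::Result<()> {
    let bounds = WindowBoundsJson::Windowed { x: 80, y: 60, width: 1440, height: 900 };
    // Externally tagged by default: {"Windowed":{"x":80,"y":60,...}}
    let json = serde_json::to_string(&bounds)?;
    let restored: WindowBoundsJson = serde_json::from_str(&json)?;
    assert_eq!(bounds, restored);
    Ok(())
}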
WindowBoundsJson::Fullscreen { + x: f32::from(origin.x).round() as i32, + y: f32::from(origin.y).round() as i32, + width: f32::from(size.width).round() as i32, + height: f32::from(size.height).round() as i32, + } + } + } + } +} + +impl From for WindowBounds { + fn from(n: WindowBoundsJson) -> Self { + match n { + WindowBoundsJson::Windowed { + x, + y, + width, + height, + } => WindowBounds::Windowed(Bounds { + origin: point(px(x as f32), px(y as f32)), + size: size(px(width as f32), px(height as f32)), + }), + WindowBoundsJson::Maximized { + x, + y, + width, + height, + } => WindowBounds::Maximized(Bounds { + origin: point(px(x as f32), px(y as f32)), + size: size(px(width as f32), px(height as f32)), + }), + WindowBoundsJson::Fullscreen { + x, + y, + width, + height, + } => WindowBounds::Fullscreen(Bounds { + origin: point(px(x as f32), px(y as f32)), + size: size(px(width as f32), px(height as f32)), + }), + } + } +} + #[derive(Debug)] pub struct Breakpoint { pub position: u32, @@ -702,6 +833,18 @@ impl Domain for WorkspaceDb { sql!( DROP TABLE ssh_connections; ), + sql!( + ALTER TABLE remote_connections ADD COLUMN name TEXT; + ALTER TABLE remote_connections ADD COLUMN container_id TEXT; + ), + sql!( + CREATE TABLE IF NOT EXISTS trusted_worktrees ( + trust_id INTEGER PRIMARY KEY AUTOINCREMENT, + absolute_path TEXT, + user_name TEXT, + host_name TEXT + ) STRICT; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -728,9 +871,9 @@ impl WorkspaceDb { pub(crate) fn remote_workspace_for_roots>( &self, worktree_roots: &[P], - ssh_project_id: RemoteConnectionId, + remote_project_id: RemoteConnectionId, ) -> Option { - self.workspace_for_roots_internal(worktree_roots, Some(ssh_project_id)) + self.workspace_for_roots_internal(worktree_roots, Some(remote_project_id)) } pub(crate) fn workspace_for_roots_internal>( @@ -806,9 +949,20 @@ impl WorkspaceDb { order: paths_order, }); + let remote_connection_options = if let Some(remote_connection_id) = remote_connection_id { + self.remote_connection(remote_connection_id) + .context("Get remote connection") + .log_err() + } else { + None + }; + Some(SerializedWorkspace { id: workspace_id, - location: SerializedWorkspaceLocation::Local, + location: match remote_connection_options { + Some(options) => SerializedWorkspaceLocation::Remote(options), + None => SerializedWorkspaceLocation::Local, + }, paths, center_group: self .get_center_pane_group(workspace_id) @@ -1110,14 +1264,16 @@ impl WorkspaceDb { options: RemoteConnectionOptions, ) -> Result { let kind; - let user; + let mut user = None; let mut host = None; let mut port = None; let mut distro = None; + let mut name = None; + let mut container_id = None; match options { RemoteConnectionOptions::Ssh(options) => { kind = RemoteConnectionKind::Ssh; - host = Some(options.host); + host = Some(options.host.to_string()); port = options.port; user = options.username; } @@ -1126,8 +1282,22 @@ impl WorkspaceDb { distro = Some(options.distro_name); user = options.user; } + RemoteConnectionOptions::Docker(options) => { + kind = RemoteConnectionKind::Docker; + container_id = Some(options.container_id); + name = Some(options.name); + } } - Self::get_or_create_remote_connection_query(this, kind, host, port, user, distro) + Self::get_or_create_remote_connection_query( + this, + kind, + host, + port, + user, + distro, + name, + container_id, + ) } fn get_or_create_remote_connection_query( @@ -1137,6 +1307,8 @@ impl WorkspaceDb { port: Option, user: Option, distro: Option, + name: 
Option, + container_id: Option, ) -> Result { if let Some(id) = this.select_row_bound(sql!( SELECT id @@ -1146,7 +1318,9 @@ impl WorkspaceDb { host IS ? AND port IS ? AND user IS ? AND - distro IS ? + distro IS ? AND + name IS ? AND + container_id IS ? LIMIT 1 ))?(( kind.serialize(), @@ -1154,6 +1328,8 @@ impl WorkspaceDb { port, user.clone(), distro.clone(), + name.clone(), + container_id.clone(), ))? { Ok(RemoteConnectionId(id)) } else { @@ -1163,10 +1339,20 @@ impl WorkspaceDb { host, port, user, - distro - ) VALUES (?1, ?2, ?3, ?4, ?5) + distro, + name, + container_id + ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7) RETURNING id - ))?((kind.serialize(), host, port, user, distro))? + ))?(( + kind.serialize(), + host, + port, + user, + distro, + name, + container_id, + ))? .context("failed to insert remote project")?; Ok(RemoteConnectionId(id)) } @@ -1249,15 +1435,23 @@ impl WorkspaceDb { fn remote_connections(&self) -> Result> { Ok(self.select(sql!( SELECT - id, kind, host, port, user, distro + id, kind, host, port, user, distro, container_id, name FROM remote_connections ))?()? .into_iter() - .filter_map(|(id, kind, host, port, user, distro)| { + .filter_map(|(id, kind, host, port, user, distro, container_id, name)| { Some(( RemoteConnectionId(id), - Self::remote_connection_from_row(kind, host, port, user, distro)?, + Self::remote_connection_from_row( + kind, + host, + port, + user, + distro, + container_id, + name, + )?, )) }) .collect()) @@ -1267,13 +1461,13 @@ impl WorkspaceDb { &self, id: RemoteConnectionId, ) -> Result { - let (kind, host, port, user, distro) = self.select_row_bound(sql!( - SELECT kind, host, port, user, distro + let (kind, host, port, user, distro, container_id, name) = self.select_row_bound(sql!( + SELECT kind, host, port, user, distro, container_id, name FROM remote_connections WHERE id = ? ))?(id.0)? .context("no such remote connection")?; - Self::remote_connection_from_row(kind, host, port, user, distro) + Self::remote_connection_from_row(kind, host, port, user, distro, container_id, name) .context("invalid remote_connection row") } @@ -1283,6 +1477,8 @@ impl WorkspaceDb { port: Option, user: Option, distro: Option, + container_id: Option, + name: Option, ) -> Option { match RemoteConnectionKind::deserialize(&kind)? { RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { @@ -1290,32 +1486,21 @@ impl WorkspaceDb { user: user, })), RemoteConnectionKind::Ssh => Some(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host?, + host: host?.into(), port, username: user, ..Default::default() })), + RemoteConnectionKind::Docker => { + Some(RemoteConnectionOptions::Docker(DockerConnectionOptions { + container_id: container_id?, + name: name?, + upload_binary_over_docker_exec: false, + })) + } } } - pub(crate) fn last_window( - &self, - ) -> anyhow::Result<(Option, Option)> { - let mut prepared_query = - self.select::<(Option, Option)>(sql!( - SELECT - display, - window_state, window_x, window_y, window_width, window_height - FROM workspaces - WHERE paths - IS NOT NULL - ORDER BY timestamp DESC - LIMIT 1 - ))?; - let result = prepared_query()?; - Ok(result.into_iter().next().unwrap_or((None, None))) - } - query! { pub async fn delete_workspace_by_id(id: WorkspaceId) -> Result<()> { DELETE FROM workspaces @@ -1359,11 +1544,11 @@ impl WorkspaceDb { // If a local workspace points to WSL, this check will cause us to wait for the // WSL VM and file server to boot up. This can block for many seconds. 
// Supported scenarios use remote workspaces. - if !has_wsl_path - && paths.paths().iter().all(|path| path.exists()) - && paths.paths().iter().any(|path| path.is_dir()) - { - result.push((id, SerializedWorkspaceLocation::Local, paths)); + if !has_wsl_path && paths.paths().iter().all(|path| path.exists()) { + // Only show directories in recent projects + if paths.paths().iter().any(|path| path.is_dir()) { + result.push((id, SerializedWorkspaceLocation::Local, paths)); + } } else { delete_tasks.push(self.delete_workspace_by_id(id)); } @@ -1656,49 +1841,6 @@ impl WorkspaceDb { } } - pub async fn toolchain( - &self, - workspace_id: WorkspaceId, - worktree_id: WorktreeId, - relative_worktree_path: Arc, - language_name: LanguageName, - ) -> Result> { - self.write(move |this| { - let mut select = this - .select_bound(sql!( - SELECT - name, path, raw_json - FROM toolchains - WHERE - workspace_id = ? AND - language_name = ? AND - worktree_id = ? AND - relative_worktree_path = ? - )) - .context("select toolchain")?; - - let toolchain: Vec<(String, String, String)> = select(( - workspace_id, - language_name.as_ref().to_string(), - worktree_id.to_usize(), - relative_worktree_path.as_unix_str().to_string(), - ))?; - - Ok(toolchain - .into_iter() - .next() - .and_then(|(name, path, raw_json)| { - Some(Toolchain { - name: name.into(), - path: path.into(), - language_name, - as_json: serde_json::Value::from_str(&raw_json).ok()?, - }) - })) - }) - .await - } - pub(crate) async fn toolchains( &self, workspace_id: WorkspaceId, @@ -1773,6 +1915,135 @@ impl WorkspaceDb { Ok(()) }).await } + + pub(crate) async fn save_trusted_worktrees( + &self, + trusted_worktrees: HashMap, HashSet>, + ) -> anyhow::Result<()> { + use anyhow::Context as _; + use db::sqlez::statement::Statement; + use itertools::Itertools as _; + + DB.clear_trusted_worktrees() + .await + .context("clearing previous trust state")?; + + let trusted_worktrees = trusted_worktrees + .into_iter() + .flat_map(|(host, abs_paths)| { + abs_paths + .into_iter() + .map(move |abs_path| (Some(abs_path), host.clone())) + }) + .collect::>(); + let mut first_worktree; + let mut last_worktree = 0_usize; + for (count, placeholders) in std::iter::once("(?, ?, ?)") + .cycle() + .take(trusted_worktrees.len()) + .chunks(MAX_QUERY_PLACEHOLDERS / 3) + .into_iter() + .map(|chunk| { + let mut count = 0; + let placeholders = chunk + .inspect(|_| { + count += 1; + }) + .join(", "); + (count, placeholders) + }) + .collect::>() + { + first_worktree = last_worktree; + last_worktree = last_worktree + count; + let query = format!( + r#"INSERT INTO trusted_worktrees(absolute_path, user_name, host_name) +VALUES {placeholders};"# + ); + + let trusted_worktrees = trusted_worktrees[first_worktree..last_worktree].to_vec(); + self.write(move |conn| { + let mut statement = Statement::prepare(conn, query)?; + let mut next_index = 1; + for (abs_path, host) in trusted_worktrees { + let abs_path = abs_path.as_ref().map(|abs_path| abs_path.to_string_lossy()); + next_index = statement.bind( + &abs_path.as_ref().map(|abs_path| abs_path.as_ref()), + next_index, + )?; + next_index = statement.bind( + &host + .as_ref() + .and_then(|host| Some(host.user_name.as_ref()?.as_str())), + next_index, + )?; + next_index = statement.bind( + &host.as_ref().map(|host| host.host_identifier.as_str()), + next_index, + )?; + } + statement.exec() + }) + .await + .context("inserting new trusted state")?; + } + Ok(()) + } + + pub fn fetch_trusted_worktrees( + &self, + worktree_store: Option>, + host: Option, + cx: 
&App, + ) -> Result, HashSet>> { + let trusted_worktrees = DB.trusted_worktrees()?; + Ok(trusted_worktrees + .into_iter() + .filter_map(|(abs_path, user_name, host_name)| { + let db_host = match (user_name, host_name) { + (_, None) => None, + (None, Some(host_name)) => Some(RemoteHostLocation { + user_name: None, + host_identifier: SharedString::new(host_name), + }), + (Some(user_name), Some(host_name)) => Some(RemoteHostLocation { + user_name: Some(SharedString::new(user_name)), + host_identifier: SharedString::new(host_name), + }), + }; + + let abs_path = abs_path?; + Some(if db_host != host { + (db_host, PathTrust::AbsPath(abs_path)) + } else if let Some(worktree_store) = &worktree_store { + find_worktree_in_store(worktree_store.read(cx), &abs_path, cx) + .map(PathTrust::Worktree) + .map(|trusted_worktree| (host.clone(), trusted_worktree)) + .unwrap_or_else(|| (db_host.clone(), PathTrust::AbsPath(abs_path))) + } else { + (db_host, PathTrust::AbsPath(abs_path)) + }) + }) + .fold(HashMap::default(), |mut acc, (remote_host, path_trust)| { + acc.entry(remote_host) + .or_insert_with(HashSet::default) + .insert(path_trust); + acc + })) + } + + query! { + fn trusted_worktrees() -> Result, Option, Option)>> { + SELECT absolute_path, user_name, host_name + FROM trusted_worktrees + } + } + + query! { + pub async fn clear_trusted_worktrees() -> Result<()> { + DELETE FROM trusted_worktrees + } + } } pub fn delete_unloaded_items( @@ -2480,7 +2751,7 @@ mod tests { let connection_id = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: "my-host".to_string(), + host: "my-host".into(), port: Some(1234), ..Default::default() })) @@ -2669,7 +2940,7 @@ mod tests { .into_iter() .map(|(host, user)| async { let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host.to_string(), + host: host.into(), username: Some(user.to_string()), ..Default::default() }); @@ -2760,7 +3031,7 @@ mod tests { let connection_id = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host.clone(), + host: host.clone().into(), port, username: user.clone(), ..Default::default() @@ -2771,7 +3042,7 @@ mod tests { // Test that calling the function again with the same parameters returns the same project let same_connection = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host.clone(), + host: host.clone().into(), port, username: user.clone(), ..Default::default() @@ -2788,7 +3059,7 @@ mod tests { let different_connection = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host2.clone(), + host: host2.clone().into(), port: port2, username: user2.clone(), ..Default::default() @@ -2807,7 +3078,7 @@ mod tests { let connection_id = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host.clone(), + host: host.clone().into(), port, username: None, ..Default::default() @@ -2817,7 +3088,7 @@ mod tests { let same_connection_id = db .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions { - host: host.clone(), + host: host.clone().into(), port, username: user.clone(), ..Default::default() @@ -2847,7 +3118,7 @@ mod tests { ids.push( db.get_or_create_remote_connection(RemoteConnectionOptions::Ssh( SshConnectionOptions { - host: host.clone(), + host: host.clone().into(), port: *port, username: user.clone(), ..Default::default() diff --git a/crates/workspace/src/persistence/model.rs 
b/crates/workspace/src/persistence/model.rs index a37b2ebbe93efb23cad6a98f127ba1f8800a3eb3..08a3adf9ebd7fa49a5f8fb86eec65c66deb00421 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -32,6 +32,7 @@ pub(crate) struct RemoteConnectionId(pub u64); pub(crate) enum RemoteConnectionKind { Ssh, Wsl, + Docker, } #[derive(Debug, PartialEq, Clone)] @@ -75,6 +76,7 @@ impl RemoteConnectionKind { match self { RemoteConnectionKind::Ssh => "ssh", RemoteConnectionKind::Wsl => "wsl", + RemoteConnectionKind::Docker => "docker", } } @@ -82,6 +84,7 @@ impl RemoteConnectionKind { match text { "ssh" => Some(Self::Ssh), "wsl" => Some(Self::Wsl), + "docker" => Some(Self::Docker), _ => None, } } diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 9907df3be3eb8594f6cc8f63f05e2e93befd416c..badfe7d2437424c1ce18a1afde19507e7d6e1d3b 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -96,6 +96,7 @@ pub trait SearchableItem: Item + EventEmitter { fn update_matches( &mut self, matches: &[Self::Match], + active_match_index: Option, window: &mut Window, cx: &mut Context, ); @@ -104,7 +105,6 @@ pub trait SearchableItem: Item + EventEmitter { &mut self, index: usize, matches: &[Self::Match], - collapse: bool, window: &mut Window, cx: &mut Context, ); @@ -180,13 +180,18 @@ pub trait SearchableItemHandle: ItemHandle { handler: Box, ) -> Subscription; fn clear_matches(&self, window: &mut Window, cx: &mut App); - fn update_matches(&self, matches: &AnyVec, window: &mut Window, cx: &mut App); + fn update_matches( + &self, + matches: &AnyVec, + active_match_index: Option, + window: &mut Window, + cx: &mut App, + ); fn query_suggestion(&self, window: &mut Window, cx: &mut App) -> String; fn activate_match( &self, index: usize, matches: &AnyVec, - collapse: bool, window: &mut Window, cx: &mut App, ); @@ -266,10 +271,16 @@ impl SearchableItemHandle for Entity { fn clear_matches(&self, window: &mut Window, cx: &mut App) { self.update(cx, |this, cx| this.clear_matches(window, cx)); } - fn update_matches(&self, matches: &AnyVec, window: &mut Window, cx: &mut App) { + fn update_matches( + &self, + matches: &AnyVec, + active_match_index: Option, + window: &mut Window, + cx: &mut App, + ) { let matches = matches.downcast_ref().unwrap(); self.update(cx, |this, cx| { - this.update_matches(matches.as_slice(), window, cx) + this.update_matches(matches.as_slice(), active_match_index, window, cx) }); } fn query_suggestion(&self, window: &mut Window, cx: &mut App) -> String { @@ -279,13 +290,12 @@ impl SearchableItemHandle for Entity { &self, index: usize, matches: &AnyVec, - collapse: bool, window: &mut Window, cx: &mut App, ) { let matches = matches.downcast_ref().unwrap(); self.update(cx, |this, cx| { - this.activate_match(index, matches.as_slice(), collapse, window, cx) + this.activate_match(index, matches.as_slice(), window, cx) }); } @@ -402,13 +412,13 @@ impl SearchableItemHandle for Entity { impl From> for AnyView { fn from(this: Box) -> Self { - this.to_any() + this.to_any_view() } } impl From<&Box> for AnyView { fn from(this: &Box) -> Self { - this.to_any() + this.to_any_view() } } diff --git a/crates/workspace/src/security_modal.rs b/crates/workspace/src/security_modal.rs new file mode 100644 index 0000000000000000000000000000000000000000..bb1482d7cce2a9849a78a9512598e389a6e5eea0 --- /dev/null +++ b/crates/workspace/src/security_modal.rs @@ -0,0 +1,334 @@ +//! 
A UI interface for managing the [`TrustedWorktrees`] data. + +use std::{ + borrow::Cow, + path::{Path, PathBuf}, + sync::Arc, +}; + +use collections::{HashMap, HashSet}; +use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, WeakEntity}; + +use project::{ + WorktreeId, + trusted_worktrees::{PathTrust, RemoteHostLocation, TrustedWorktrees}, + worktree_store::WorktreeStore, +}; +use smallvec::SmallVec; +use theme::ActiveTheme; +use ui::{ + AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, prelude::*, +}; + +use crate::{DismissDecision, ModalView, ToggleWorktreeSecurity}; + +pub struct SecurityModal { + restricted_paths: HashMap, + home_dir: Option, + trust_parents: bool, + worktree_store: WeakEntity, + remote_host: Option, + focus_handle: FocusHandle, + trusted: Option, +} + +#[derive(Debug, PartialEq, Eq)] +struct RestrictedPath { + abs_path: Arc, + is_file: bool, + host: Option, +} + +impl Focusable for SecurityModal { + fn focus_handle(&self, _: &ui::App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter for SecurityModal {} + +impl ModalView for SecurityModal { + fn fade_out_background(&self) -> bool { + true + } + + fn on_before_dismiss(&mut self, _: &mut Window, _: &mut Context) -> DismissDecision { + match self.trusted { + Some(false) => telemetry::event!("Open in Restricted", source = "Worktree Trust Modal"), + Some(true) => telemetry::event!("Trust and Continue", source = "Worktree Trust Modal"), + None => telemetry::event!("Dismissed", source = "Worktree Trust Modal"), + } + DismissDecision::Dismiss(true) + } +} + +impl Render for SecurityModal { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + if self.restricted_paths.is_empty() { + self.dismiss(cx); + return v_flex().into_any_element(); + } + + let header_label = if self.restricted_paths.len() == 1 { + "Unrecognized Project" + } else { + "Unrecognized Projects" + }; + + let trust_label = self.build_trust_label(); + + AlertModal::new("security-modal") + .width(rems(40.)) + .key_context("SecurityModal") + .track_focus(&self.focus_handle(cx)) + .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { + this.trust_and_dismiss(cx); + })) + .on_action(cx.listener(|security_modal, _: &ToggleWorktreeSecurity, _window, cx| { + security_modal.trusted = Some(false); + security_modal.dismiss(cx); + })) + .header( + v_flex() + .p_3() + .gap_1() + .rounded_t_md() + .bg(cx.theme().colors().editor_background.opacity(0.5)) + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child(Label::new(header_label)), + ) + .children(self.restricted_paths.values().filter_map(|restricted_path| { + let abs_path = if restricted_path.is_file { + restricted_path.abs_path.parent() + } else { + Some(restricted_path.abs_path.as_ref()) + }?; + let label = match &restricted_path.host { + Some(remote_host) => match &remote_host.user_name { + Some(user_name) => format!( + "{} ({}@{})", + self.shorten_path(abs_path).display(), + user_name, + remote_host.host_identifier + ), + None => format!( + "{} ({})", + self.shorten_path(abs_path).display(), + remote_host.host_identifier + ), + }, + None => self.shorten_path(abs_path).display().to_string(), + }; + Some(h_flex() + .pl(IconSize::default().rems() + rems(0.5)) + .child(Label::new(label).color(Color::Muted))) + })), + ) + .child( + v_flex() + .gap_2() + .child( + v_flex() + .child( + Label::new( + "Untrusted 
projects are opened in Restricted Mode to protect your system.", + ) + .color(Color::Muted), + ) + .child( + Label::new( + "Review .zed/settings.json for any extensions or commands configured by this project.", + ) + .color(Color::Muted), + ), + ) + .child( + v_flex() + .child(Label::new("Restricted Mode prevents:").color(Color::Muted)) + .child(ListBulletItem::new("Project settings from being applied")) + .child(ListBulletItem::new("Language servers from running")) + .child(ListBulletItem::new("MCP Server integrations from installing")), + ) + .map(|this| match trust_label { + Some(trust_label) => this.child( + Checkbox::new("trust-parents", ToggleState::from(self.trust_parents)) + .label(trust_label) + .on_click(cx.listener( + |security_modal, state: &ToggleState, _, cx| { + security_modal.trust_parents = state.selected(); + cx.notify(); + cx.stop_propagation(); + }, + )), + ), + None => this, + }), + ) + .footer( + h_flex() + .px_3() + .pb_3() + .gap_1() + .justify_end() + .child( + Button::new("rm", "Stay in Restricted Mode") + .key_binding( + KeyBinding::for_action( + &ToggleWorktreeSecurity, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(move |security_modal, _, _, cx| { + security_modal.trusted = Some(false); + security_modal.dismiss(cx); + cx.stop_propagation(); + })), + ) + .child( + Button::new("tc", "Trust and Continue") + .style(ButtonStyle::Filled) + .layer(ui::ElevationIndex::ModalSurface) + .key_binding( + KeyBinding::for_action(&menu::Confirm, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(move |security_modal, _, _, cx| { + security_modal.trust_and_dismiss(cx); + cx.stop_propagation(); + })), + ), + ) + .into_any_element() + } +} + +impl SecurityModal { + pub fn new( + worktree_store: WeakEntity, + remote_host: Option>, + cx: &mut Context, + ) -> Self { + let mut this = Self { + worktree_store, + remote_host: remote_host.map(|host| host.into()), + restricted_paths: HashMap::default(), + focus_handle: cx.focus_handle(), + trust_parents: false, + home_dir: std::env::home_dir(), + trusted: None, + }; + this.refresh_restricted_paths(cx); + + this + } + + fn build_trust_label(&self) -> Option> { + let mut has_restricted_files = false; + let available_parents = self + .restricted_paths + .values() + .filter(|restricted_path| { + has_restricted_files |= restricted_path.is_file; + !restricted_path.is_file + }) + .filter_map(|restricted_path| restricted_path.abs_path.parent()) + .collect::>(); + match available_parents.len() { + 0 => { + if has_restricted_files { + Some(Cow::Borrowed("Trust all single files")) + } else { + None + } + } + 1 => Some(Cow::Owned(format!( + "Trust all projects in the {:} folder", + self.shorten_path(available_parents[0]).display() + ))), + _ => Some(Cow::Borrowed("Trust all projects in the parent folders")), + } + } + + fn shorten_path<'a>(&self, path: &'a Path) -> Cow<'a, Path> { + match &self.home_dir { + Some(home_dir) => path + .strip_prefix(home_dir) + .map(|stripped| Path::new("~").join(stripped)) + .map(Cow::Owned) + .unwrap_or(Cow::Borrowed(path)), + None => Cow::Borrowed(path), + } + } + + fn trust_and_dismiss(&mut self, cx: &mut Context) { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + let mut paths_to_trust = self + .restricted_paths + .keys() + .copied() + .map(PathTrust::Worktree) + .collect::>(); + if self.trust_parents { + paths_to_trust.extend(self.restricted_paths.values().filter_map( + 
|restricted_paths| { + if restricted_paths.is_file { + None + } else { + let parent_abs_path = + restricted_paths.abs_path.parent()?.to_owned(); + Some(PathTrust::AbsPath(parent_abs_path)) + } + }, + )); + } + trusted_worktrees.trust(paths_to_trust, self.remote_host.clone(), cx); + }); + } + + self.trusted = Some(true); + self.dismiss(cx); + } + + pub fn dismiss(&mut self, cx: &mut Context) { + cx.emit(DismissEvent); + } + + pub fn refresh_restricted_paths(&mut self, cx: &mut Context) { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + if let Some(worktree_store) = self.worktree_store.upgrade() { + let new_restricted_worktrees = trusted_worktrees + .read(cx) + .restricted_worktrees(worktree_store.read(cx), cx) + .into_iter() + .filter_map(|(worktree_id, abs_path)| { + let worktree = worktree_store.read(cx).worktree_for_id(worktree_id, cx)?; + Some(( + worktree_id, + RestrictedPath { + abs_path, + is_file: worktree.read(cx).is_single_file(), + host: self.remote_host.clone(), + }, + )) + }) + .collect::>(); + + if self.restricted_paths != new_restricted_worktrees { + self.trust_parents = false; + self.restricted_paths = new_restricted_worktrees; + cx.notify(); + } + } + } else if !self.restricted_paths.is_empty() { + self.restricted_paths.clear(); + cx.notify(); + } + } +} diff --git a/crates/workspace/src/shared_screen.rs b/crates/workspace/src/shared_screen.rs index 3c009f613ea52906649b73bb9fd657bab6906c3b..564560274699ab6685d481340c5efd4b6336ed56 100644 --- a/crates/workspace/src/shared_screen.rs +++ b/crates/workspace/src/shared_screen.rs @@ -42,6 +42,11 @@ impl SharedScreen { }) .detach(); + cx.observe_release(&room, |_, _, cx| { + cx.emit(Event::Close); + }) + .detach(); + let view = cx.new(|cx| RemoteVideoTrackView::new(track.clone(), window, cx)); cx.subscribe(&view, |_, _, ev, cx| match ev { call::RemoteVideoTrackViewEvent::Close => cx.emit(Event::Close), diff --git a/crates/workspace/src/theme_preview.rs b/crates/workspace/src/theme_preview.rs index 94a280b4da1283178201898bd3e8c2c71e5f0b1f..f978da706b7476d04bf656ed63faf5bd38b83d20 100644 --- a/crates/workspace/src/theme_preview.rs +++ b/crates/workspace/src/theme_preview.rs @@ -6,9 +6,9 @@ use strum::IntoEnumIterator; use theme::all_theme_colors; use ui::{ AudioStatus, Avatar, AvatarAudioStatusIndicator, AvatarAvailabilityIndicator, ButtonLike, - Checkbox, CheckboxWithLabel, CollaboratorAvailability, ContentGroup, DecoratedIcon, - ElevationIndex, Facepile, IconDecoration, Indicator, KeybindingHint, Switch, TintColor, - Tooltip, prelude::*, utils::calculate_contrast_ratio, + Checkbox, CollaboratorAvailability, ContentGroup, DecoratedIcon, ElevationIndex, Facepile, + IconDecoration, Indicator, KeybindingHint, Switch, TintColor, Tooltip, prelude::*, + utils::calculate_contrast_ratio, }; use crate::{Item, Workspace}; diff --git a/crates/workspace/src/utility_pane.rs b/crates/workspace/src/utility_pane.rs new file mode 100644 index 0000000000000000000000000000000000000000..2760000216d9164367c58d41d4f1b1893dc8cd75 --- /dev/null +++ b/crates/workspace/src/utility_pane.rs @@ -0,0 +1,282 @@ +use gpui::{ + AppContext as _, EntityId, MouseButton, Pixels, Render, StatefulInteractiveElement, + Subscription, WeakEntity, deferred, px, +}; +use ui::{ + ActiveTheme as _, Context, FluentBuilder as _, InteractiveElement as _, IntoElement, + ParentElement as _, RenderOnce, Styled as _, Window, div, +}; + +use crate::{ + DockPosition, Workspace, + dock::{ClosePane, MinimizePane, UtilityPane, UtilityPaneHandle}, +}; + 
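// Illustrative sketch (not part of the diff): the two small path routines the
// SecurityModal above relies on — collapsing the home directory to "~" for
// display, and widening a trust request to parent folders when "trust parents"
// is checked (single-file worktrees contribute no parent). `PathTrust` here is
// a simplified stand-in for the project::trusted_worktrees type.
use std::collections::BTreeSet;
use std::path::{Path, PathBuf};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum PathTrust {
    AbsPath(PathBuf),
}

fn shorten_path(path: &Path, home_dir: Option<&Path>) -> PathBuf {
    match home_dir.and_then(|home| path.strip_prefix(home).ok()) {
        Some(stripped) => Path::new("~").join(stripped),
        None => path.to_path_buf(),
    }
}

// `restricted` pairs each restricted absolute path with an `is_file` flag.
fn paths_to_trust(restricted: &[(PathBuf, bool)], trust_parents: bool) -> BTreeSet<PathTrust> {
    let mut trusted: BTreeSet<_> = restricted
        .iter()
        .map(|(abs_path, _)| PathTrust::AbsPath(abs_path.clone()))
        .collect();
    if trust_parents {
        // Folders extend trust to their parent directory; single files do not.
        trusted.extend(restricted.iter().filter_map(|(abs_path, is_file)| {
            if *is_file {
                None
            } else {
                Some(PathTrust::AbsPath(abs_path.parent()?.to_path_buf()))
            }
        }));
    }
    trusted
}

fn main() {
    let home = PathBuf::from("/home/alice");
    let project = home.join("work/zed");
    println!("{}", shorten_path(&project, Some(&home)).display()); // ~/work/zed
    let trusted = paths_to_trust(&[(project, false)], true);
    assert_eq!(trusted.len(), 2); // the project itself plus its parent folder
}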
+pub(crate) const UTILITY_PANE_RESIZE_HANDLE_SIZE: Pixels = px(6.0); +pub(crate) const UTILITY_PANE_MIN_WIDTH: Pixels = px(20.0); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UtilityPaneSlot { + Left, + Right, +} + +struct UtilityPaneSlotState { + panel_id: EntityId, + utility_pane: Box, + _subscriptions: Vec, +} + +#[derive(Default)] +pub struct UtilityPaneState { + left_slot: Option, + right_slot: Option, +} + +#[derive(Clone)] +pub struct DraggedUtilityPane(pub UtilityPaneSlot); + +impl Render for DraggedUtilityPane { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + gpui::Empty + } +} + +pub fn utility_slot_for_dock_position(position: DockPosition) -> UtilityPaneSlot { + match position { + DockPosition::Left => UtilityPaneSlot::Left, + DockPosition::Right => UtilityPaneSlot::Right, + DockPosition::Bottom => UtilityPaneSlot::Left, + } +} + +impl Workspace { + pub fn utility_pane(&self, slot: UtilityPaneSlot) -> Option<&dyn UtilityPaneHandle> { + match slot { + UtilityPaneSlot::Left => self + .utility_panes + .left_slot + .as_ref() + .map(|s| s.utility_pane.as_ref()), + UtilityPaneSlot::Right => self + .utility_panes + .right_slot + .as_ref() + .map(|s| s.utility_pane.as_ref()), + } + } + + pub fn toggle_utility_pane( + &mut self, + slot: UtilityPaneSlot, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(handle) = self.utility_pane(slot) { + let current = handle.expanded(cx); + handle.set_expanded(!current, cx); + } + cx.notify(); + self.serialize_workspace(window, cx); + } + + pub fn register_utility_pane( + &mut self, + slot: UtilityPaneSlot, + panel_id: EntityId, + handle: gpui::Entity, + cx: &mut Context, + ) { + let minimize_subscription = + cx.subscribe(&handle, move |this, _, _event: &MinimizePane, cx| { + if let Some(handle) = this.utility_pane(slot) { + handle.set_expanded(false, cx); + } + cx.notify(); + }); + + let close_subscription = cx.subscribe(&handle, move |this, _, _event: &ClosePane, cx| { + this.clear_utility_pane(slot, cx); + }); + + let subscriptions = vec![minimize_subscription, close_subscription]; + let boxed_handle: Box = Box::new(handle); + + match slot { + UtilityPaneSlot::Left => { + self.utility_panes.left_slot = Some(UtilityPaneSlotState { + panel_id, + utility_pane: boxed_handle, + _subscriptions: subscriptions, + }); + } + UtilityPaneSlot::Right => { + self.utility_panes.right_slot = Some(UtilityPaneSlotState { + panel_id, + utility_pane: boxed_handle, + _subscriptions: subscriptions, + }); + } + } + cx.notify(); + } + + pub fn clear_utility_pane(&mut self, slot: UtilityPaneSlot, cx: &mut Context) { + match slot { + UtilityPaneSlot::Left => { + self.utility_panes.left_slot = None; + } + UtilityPaneSlot::Right => { + self.utility_panes.right_slot = None; + } + } + cx.notify(); + } + + pub fn clear_utility_pane_if_provider( + &mut self, + slot: UtilityPaneSlot, + provider_panel_id: EntityId, + cx: &mut Context, + ) { + let should_clear = match slot { + UtilityPaneSlot::Left => self + .utility_panes + .left_slot + .as_ref() + .is_some_and(|slot| slot.panel_id == provider_panel_id), + UtilityPaneSlot::Right => self + .utility_panes + .right_slot + .as_ref() + .is_some_and(|slot| slot.panel_id == provider_panel_id), + }; + + if should_clear { + self.clear_utility_pane(slot, cx); + } + } + + pub fn resize_utility_pane( + &mut self, + slot: UtilityPaneSlot, + new_width: Pixels, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(handle) = self.utility_pane(slot) { + let max_width = 
self.max_utility_pane_width(window, cx); + let width = new_width.max(UTILITY_PANE_MIN_WIDTH).min(max_width); + handle.set_width(Some(width), cx); + cx.notify(); + self.serialize_workspace(window, cx); + } + } + + pub fn reset_utility_pane_width( + &mut self, + slot: UtilityPaneSlot, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(handle) = self.utility_pane(slot) { + handle.set_width(None, cx); + cx.notify(); + self.serialize_workspace(window, cx); + } + } +} + +#[derive(IntoElement)] +pub struct UtilityPaneFrame { + workspace: WeakEntity, + slot: UtilityPaneSlot, + handle: Box, +} + +impl UtilityPaneFrame { + pub fn new( + slot: UtilityPaneSlot, + handle: Box, + cx: &mut Context, + ) -> Self { + let workspace = cx.weak_entity(); + Self { + workspace, + slot, + handle, + } + } +} + +impl RenderOnce for UtilityPaneFrame { + fn render(self, _window: &mut Window, cx: &mut ui::App) -> impl IntoElement { + let workspace = self.workspace.clone(); + let slot = self.slot; + let width = self.handle.width(cx); + + let create_resize_handle = || { + let workspace_handle = workspace.clone(); + let handle = div() + .id(match slot { + UtilityPaneSlot::Left => "utility-pane-resize-handle-left", + UtilityPaneSlot::Right => "utility-pane-resize-handle-right", + }) + .on_drag(DraggedUtilityPane(slot), move |pane, _, _, cx| { + cx.stop_propagation(); + cx.new(|_| pane.clone()) + }) + .on_mouse_down(MouseButton::Left, move |_, _, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + move |e: &gpui::MouseUpEvent, window, cx| { + if e.click_count == 2 { + workspace_handle + .update(cx, |workspace, cx| { + workspace.reset_utility_pane_width(slot, window, cx); + }) + .ok(); + cx.stop_propagation(); + } + }, + ) + .occlude(); + + match slot { + UtilityPaneSlot::Left => deferred( + handle + .absolute() + .right(-UTILITY_PANE_RESIZE_HANDLE_SIZE / 2.) + .top(px(0.)) + .h_full() + .w(UTILITY_PANE_RESIZE_HANDLE_SIZE) + .cursor_col_resize(), + ), + UtilityPaneSlot::Right => deferred( + handle + .absolute() + .left(-UTILITY_PANE_RESIZE_HANDLE_SIZE / 2.) 
+ .top(px(0.)) + .h_full() + .w(UTILITY_PANE_RESIZE_HANDLE_SIZE) + .cursor_col_resize(), + ), + } + }; + + div() + .h_full() + .bg(cx.theme().colors().tab_bar_background) + .w(width) + .border_color(cx.theme().colors().border) + .when(self.slot == UtilityPaneSlot::Left, |this| this.border_r_1()) + .when(self.slot == UtilityPaneSlot::Right, |this| { + this.border_l_1() + }) + .child(create_resize_handle()) + .child(self.handle.to_any()) + .into_any_element() + } +} diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs new file mode 100644 index 0000000000000000000000000000000000000000..4d84f3072f87ffa3246a313cbc749ddd61287d25 --- /dev/null +++ b/crates/workspace/src/welcome.rs @@ -0,0 +1,568 @@ +use crate::{ + NewFile, Open, PathList, SerializedWorkspaceLocation, WORKSPACE_DB, Workspace, WorkspaceId, + item::{Item, ItemEvent}, +}; +use git::Clone as GitClone; +use gpui::WeakEntity; +use gpui::{ + Action, App, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, + ParentElement, Render, Styled, Task, Window, actions, +}; +use menu::{SelectNext, SelectPrevious}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*}; +use util::ResultExt; +use zed_actions::{Extensions, OpenOnboarding, OpenSettings, agent, command_palette}; + +#[derive(PartialEq, Clone, Debug, Deserialize, Serialize, JsonSchema, Action)] +#[action(namespace = welcome)] +#[serde(transparent)] +pub struct OpenRecentProject { + pub index: usize, +} + +actions!( + zed, + [ + /// Show the Zed welcome screen + ShowWelcome + ] +); + +#[derive(IntoElement)] +struct SectionHeader { + title: SharedString, +} + +impl SectionHeader { + fn new(title: impl Into) -> Self { + Self { + title: title.into(), + } + } +} + +impl RenderOnce for SectionHeader { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + h_flex() + .px_1() + .mb_2() + .gap_2() + .child( + Label::new(self.title.to_ascii_uppercase()) + .buffer_font(cx) + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .child(Divider::horizontal().color(DividerColor::BorderVariant)) + } +} + +#[derive(IntoElement)] +struct SectionButton { + label: SharedString, + icon: IconName, + action: Box, + tab_index: usize, + focus_handle: FocusHandle, +} + +impl SectionButton { + fn new( + label: impl Into, + icon: IconName, + action: &dyn Action, + tab_index: usize, + focus_handle: FocusHandle, + ) -> Self { + Self { + label: label.into(), + icon, + action: action.boxed_clone(), + tab_index, + focus_handle, + } + } +} + +impl RenderOnce for SectionButton { + fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let id = format!("onb-button-{}", self.label); + let action_ref: &dyn Action = &*self.action; + + ButtonLike::new(id) + .tab_index(self.tab_index as isize) + .full_width() + .size(ButtonSize::Medium) + .child( + h_flex() + .w_full() + .justify_between() + .child( + h_flex() + .gap_2() + .child( + Icon::new(self.icon) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(Label::new(self.label)), + ) + .child( + KeyBinding::for_action_in(action_ref, &self.focus_handle, cx) + .size(rems_from_px(12.)), + ), + ) + .on_click(move |_, window, cx| window.dispatch_action(self.action.boxed_clone(), cx)) + } +} + +struct SectionEntry { + icon: IconName, + title: &'static str, + action: &'static dyn Action, +} + +impl SectionEntry { + fn render(&self, button_index: usize, focus: &FocusHandle, _cx: &App) 
-> impl IntoElement { + SectionButton::new( + self.title, + self.icon, + self.action, + button_index, + focus.clone(), + ) + } +} + +const CONTENT: (Section<4>, Section<3>) = ( + Section { + title: "Get Started", + entries: [ + SectionEntry { + icon: IconName::Plus, + title: "New File", + action: &NewFile, + }, + SectionEntry { + icon: IconName::FolderOpen, + title: "Open Project", + action: &Open, + }, + SectionEntry { + icon: IconName::CloudDownload, + title: "Clone Repository", + action: &GitClone, + }, + SectionEntry { + icon: IconName::ListCollapse, + title: "Open Command Palette", + action: &command_palette::Toggle, + }, + ], + }, + Section { + title: "Configure", + entries: [ + SectionEntry { + icon: IconName::Settings, + title: "Open Settings", + action: &OpenSettings, + }, + SectionEntry { + icon: IconName::ZedAssistant, + title: "View AI Settings", + action: &agent::OpenSettings, + }, + SectionEntry { + icon: IconName::Blocks, + title: "Explore Extensions", + action: &Extensions { + category_filter: None, + id: None, + }, + }, + ], + }, +); + +struct Section { + title: &'static str, + entries: [SectionEntry; COLS], +} + +impl Section { + fn render(self, index_offset: usize, focus: &FocusHandle, cx: &App) -> impl IntoElement { + v_flex() + .min_w_full() + .child(SectionHeader::new(self.title)) + .children( + self.entries + .iter() + .enumerate() + .map(|(index, entry)| entry.render(index_offset + index, focus, cx)), + ) + } +} + +pub struct WelcomePage { + workspace: WeakEntity, + focus_handle: FocusHandle, + fallback_to_recent_projects: bool, + recent_workspaces: Option>, +} + +impl WelcomePage { + pub fn new( + workspace: WeakEntity, + fallback_to_recent_projects: bool, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let focus_handle = cx.focus_handle(); + cx.on_focus(&focus_handle, window, |_, _, cx| cx.notify()) + .detach(); + + if fallback_to_recent_projects { + cx.spawn_in(window, async move |this: WeakEntity, cx| { + let workspaces = WORKSPACE_DB + .recent_workspaces_on_disk() + .await + .log_err() + .unwrap_or_default(); + + this.update(cx, |this, cx| { + this.recent_workspaces = Some(workspaces); + cx.notify(); + }) + .ok(); + }) + .detach(); + } + + WelcomePage { + workspace, + focus_handle, + fallback_to_recent_projects, + recent_workspaces: None, + } + } + + fn select_next(&mut self, _: &SelectNext, window: &mut Window, cx: &mut Context) { + window.focus_next(cx); + cx.notify(); + } + + fn select_previous(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context) { + window.focus_prev(cx); + cx.notify(); + } + + fn open_recent_project( + &mut self, + action: &OpenRecentProject, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(recent_workspaces) = &self.recent_workspaces { + if let Some((_workspace_id, location, paths)) = recent_workspaces.get(action.index) { + let paths = paths.clone(); + let location = location.clone(); + let is_local = matches!(location, SerializedWorkspaceLocation::Local); + let workspace = self.workspace.clone(); + + if is_local { + let paths = paths.paths().to_vec(); + cx.spawn_in(window, async move |_, cx| { + let _ = workspace.update_in(cx, |workspace, window, cx| { + workspace + .open_workspace_for_paths(true, paths, window, cx) + .detach(); + }); + }) + .detach(); + } else { + use zed_actions::OpenRecent; + window.dispatch_action(OpenRecent::default().boxed_clone(), cx); + } + } + } + } + + fn render_recent_project_section( + &self, + recent_projects: Vec, + ) -> impl IntoElement { + v_flex() + .w_full() + 
.child(SectionHeader::new("Recent Projects")) + .children(recent_projects) + } + + fn render_recent_project( + &self, + index: usize, + location: &SerializedWorkspaceLocation, + paths: &PathList, + ) -> impl IntoElement { + let (icon, title) = match location { + SerializedWorkspaceLocation::Local => { + let path = paths.paths().first().map(|p| p.as_path()); + let name = path + .and_then(|p| p.file_name()) + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| "Untitled".to_string()); + (IconName::Folder, name) + } + SerializedWorkspaceLocation::Remote(_) => { + (IconName::Server, "Remote Project".to_string()) + } + }; + + SectionButton::new( + title, + icon, + &OpenRecentProject { index }, + 10, + self.focus_handle.clone(), + ) + } +} + +impl Render for WelcomePage { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let (first_section, second_section) = CONTENT; + let first_section_entries = first_section.entries.len(); + let last_index = first_section_entries + second_section.entries.len(); + + let recent_projects = self + .recent_workspaces + .as_ref() + .into_iter() + .flatten() + .take(5) + .enumerate() + .map(|(index, (_, loc, paths))| self.render_recent_project(index, loc, paths)) + .collect::>(); + + let second_section = if self.fallback_to_recent_projects && !recent_projects.is_empty() { + self.render_recent_project_section(recent_projects) + .into_any_element() + } else { + second_section + .render(first_section_entries, &self.focus_handle, cx) + .into_any_element() + }; + + let welcome_label = if self.fallback_to_recent_projects { + "Welcome back to Zed" + } else { + "Welcome to Zed" + }; + + h_flex() + .key_context("Welcome") + .track_focus(&self.focus_handle(cx)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::open_recent_project)) + .size_full() + .justify_center() + .overflow_hidden() + .bg(cx.theme().colors().editor_background) + .child( + h_flex() + .relative() + .size_full() + .px_12() + .py_40() + .max_w(px(1100.)) + .child( + v_flex() + .size_full() + .max_w_128() + .mx_auto() + .gap_6() + .overflow_x_hidden() + .child( + h_flex() + .w_full() + .justify_center() + .mb_4() + .gap_4() + .child(Vector::square(VectorName::ZedLogo, rems_from_px(45.))) + .child( + v_flex().child(Headline::new(welcome_label)).child( + Label::new("The editor for what's next") + .size(LabelSize::Small) + .color(Color::Muted) + .italic(), + ), + ), + ) + .child(first_section.render(Default::default(), &self.focus_handle, cx)) + .child(second_section) + .when(!self.fallback_to_recent_projects, |this| { + this.child( + v_flex().gap_1().child(Divider::horizontal()).child( + Button::new("welcome-exit", "Return to Onboarding") + .tab_index(last_index as isize) + .full_width() + .label_size(LabelSize::XSmall) + .on_click(|_, window, cx| { + window.dispatch_action( + OpenOnboarding.boxed_clone(), + cx, + ); + }), + ), + ) + }), + ), + ) + } +} + +impl EventEmitter for WelcomePage {} + +impl Focusable for WelcomePage { + fn focus_handle(&self, _: &App) -> gpui::FocusHandle { + self.focus_handle.clone() + } +} + +impl Item for WelcomePage { + type Event = ItemEvent; + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + "Welcome".into() + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("New Welcome Page Opened") + } + + fn show_toolbar(&self) -> bool { + false + } + + fn to_item_events(event: &Self::Event, mut f: impl 
FnMut(crate::item::ItemEvent)) { + f(*event) + } +} + +impl crate::SerializableItem for WelcomePage { + fn serialized_item_kind() -> &'static str { + "WelcomePage" + } + + fn cleanup( + workspace_id: crate::WorkspaceId, + alive_items: Vec, + _window: &mut Window, + cx: &mut App, + ) -> Task> { + crate::delete_unloaded_items( + alive_items, + workspace_id, + "welcome_pages", + &persistence::WELCOME_PAGES, + cx, + ) + } + + fn deserialize( + _project: Entity, + workspace: gpui::WeakEntity, + workspace_id: crate::WorkspaceId, + item_id: crate::ItemId, + window: &mut Window, + cx: &mut App, + ) -> Task>> { + if persistence::WELCOME_PAGES + .get_welcome_page(item_id, workspace_id) + .ok() + .is_some_and(|is_open| is_open) + { + Task::ready(Ok( + cx.new(|cx| WelcomePage::new(workspace, false, window, cx)) + )) + } else { + Task::ready(Err(anyhow::anyhow!("No welcome page to deserialize"))) + } + } + + fn serialize( + &mut self, + workspace: &mut Workspace, + item_id: crate::ItemId, + _closing: bool, + _window: &mut Window, + cx: &mut Context, + ) -> Option>> { + let workspace_id = workspace.database_id()?; + Some(cx.background_spawn(async move { + persistence::WELCOME_PAGES + .save_welcome_page(item_id, workspace_id, true) + .await + })) + } + + fn should_serialize(&self, event: &Self::Event) -> bool { + event == &ItemEvent::UpdateTab + } +} + +mod persistence { + use crate::WorkspaceDb; + use db::{ + query, + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, + }; + + pub struct WelcomePagesDb(ThreadSafeConnection); + + impl Domain for WelcomePagesDb { + const NAME: &str = stringify!(WelcomePagesDb); + + const MIGRATIONS: &[&str] = (&[sql!( + CREATE TABLE welcome_pages ( + workspace_id INTEGER, + item_id INTEGER UNIQUE, + is_open INTEGER DEFAULT FALSE, + + PRIMARY KEY(workspace_id, item_id), + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT; + )]); + } + + db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]); + + impl WelcomePagesDb { + query! { + pub async fn save_welcome_page( + item_id: crate::ItemId, + workspace_id: crate::WorkspaceId, + is_open: bool + ) -> Result<()> { + INSERT OR REPLACE INTO welcome_pages(item_id, workspace_id, is_open) + VALUES (?, ?, ?) + } + } + + query! { + pub fn get_welcome_page( + item_id: crate::ItemId, + workspace_id: crate::WorkspaceId + ) -> Result { + SELECT is_open + FROM welcome_pages + WHERE item_id = ? AND workspace_id = ? 
+ } + } + } +} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7e35510652b1118e9dc8ffa18491d3c2a7904c75..b636414250c0463eca019ad30321b19d67680fd3 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9,12 +9,15 @@ pub mod pane_group; mod path_list; mod persistence; pub mod searchable; +mod security_modal; pub mod shared_screen; mod status_bar; pub mod tasks; mod theme_preview; mod toast_layer; mod toolbar; +pub mod utility_pane; +pub mod welcome; mod workspace_settings; pub use crate::notifications::NotificationFrame; @@ -30,6 +33,7 @@ use client::{ }; use collections::{HashMap, HashSet, hash_map}; use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE}; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; use futures::{ Future, FutureExt, StreamExt, channel::{ @@ -74,7 +78,9 @@ use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, WorktreeSettings, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, + project_settings::ProjectSettings, toolchain_store::ToolchainStoreEvent, + trusted_worktrees::{TrustedWorktrees, TrustedWorktreesEvent}, }; use remote::{ RemoteClientDelegate, RemoteConnection, RemoteConnectionOptions, @@ -83,7 +89,9 @@ use remote::{ use schemars::JsonSchema; use serde::Deserialize; use session::AppSession; -use settings::{CenteredPaddingSettings, Settings, SettingsLocation, update_settings_file}; +use settings::{ + CenteredPaddingSettings, Settings, SettingsLocation, SettingsStore, update_settings_file, +}; use shared_screen::SharedScreen; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -126,11 +134,19 @@ pub use workspace_settings::{ }; use zed_actions::{Spawn, feedback::FileBugReport}; -use crate::persistence::{ - SerializedAxis, - model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup}, +use crate::{ + item::ItemBufferKind, + notifications::NotificationId, + utility_pane::{UTILITY_PANE_MIN_WIDTH, utility_slot_for_dock_position}, +}; +use crate::{ + persistence::{ + SerializedAxis, + model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup}, + }, + security_modal::SecurityModal, + utility_pane::{DraggedUtilityPane, UtilityPaneFrame, UtilityPaneSlot, UtilityPaneState}, }; -use crate::{item::ItemBufferKind, notifications::NotificationId}; pub const SERIALIZATION_THROTTLE_TIME: Duration = Duration::from_millis(200); @@ -199,6 +215,8 @@ actions!( AddFolderToProject, /// Clears all notifications. ClearAllNotifications, + /// Clears all navigation history, including forward/backward navigation, recently opened files, and recently closed tabs. **This action is irreversible**. + ClearNavigationHistory, /// Closes the active dock. CloseActiveDock, /// Closes all docks. @@ -263,6 +281,16 @@ actions!( ToggleRightDock, /// Toggles zoom on the active pane. ToggleZoom, + /// Zooms in on the active pane. + ZoomIn, + /// Zooms out of the active pane. + ZoomOut, + /// If any worktrees are in restricted mode, shows a modal with possible actions. + /// If the modal is shown already, closes it without trusting any worktree. + ToggleWorktreeSecurity, + /// Clears all trusted worktrees, placing them in restricted mode on next open. + /// Requires restart to take effect on already opened projects. + ClearTrustedWorktrees, /// Stops following a collaborator. Unfollow, /// Restores the banner. 
@@ -435,6 +463,8 @@ actions!( SwapPaneUp, /// Swaps the current pane with the one below. SwapPaneDown, + // Swaps the current pane with the first available adjacent pane (searching in order: below, above, right, left) and activates that pane. + SwapPaneAdjacent, /// Move the current pane to be at the far left. MovePaneLeft, /// Move the current pane to be at the far right. @@ -565,44 +595,43 @@ pub fn init(app_state: Arc, cx: &mut App) { toast_layer::init(cx); history_manager::init(cx); - cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx)); - cx.on_action(|_: &Reload, cx| reload(cx)); - - cx.on_action({ - let app_state = Arc::downgrade(&app_state); - move |_: &Open, cx: &mut App| { - if let Some(app_state) = app_state.upgrade() { - prompt_and_open_paths( - app_state, - PathPromptOptions { - files: true, - directories: true, - multiple: true, - prompt: None, - }, - cx, - ); + cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx)) + .on_action(|_: &Reload, cx| reload(cx)) + .on_action({ + let app_state = Arc::downgrade(&app_state); + move |_: &Open, cx: &mut App| { + if let Some(app_state) = app_state.upgrade() { + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories: true, + multiple: true, + prompt: None, + }, + cx, + ); + } } - } - }); - cx.on_action({ - let app_state = Arc::downgrade(&app_state); - move |_: &OpenFiles, cx: &mut App| { - let directories = cx.can_select_mixed_files_and_dirs(); - if let Some(app_state) = app_state.upgrade() { - prompt_and_open_paths( - app_state, - PathPromptOptions { - files: true, - directories, - multiple: true, - prompt: None, - }, - cx, - ); + }) + .on_action({ + let app_state = Arc::downgrade(&app_state); + move |_: &OpenFiles, cx: &mut App| { + let directories = cx.can_select_mixed_files_and_dirs(); + if let Some(app_state) = app_state.upgrade() { + prompt_and_open_paths( + app_state, + PathPromptOptions { + files: true, + directories, + multiple: true, + prompt: None, + }, + cx, + ); + } } - } - }); + }); } type BuildProjectItemFn = @@ -671,6 +700,7 @@ impl ProjectItemRegistry { Ok((project_entry_id, build_workspace_item)) } Err(e) => { + log::warn!("Failed to open a project item: {e:#}"); if e.error_code() == ErrorCode::Internal { if let Some(abs_path) = entry_abs_path.as_deref().filter(|_| is_file) @@ -958,6 +988,7 @@ impl AppState { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut App) -> Arc { + use fs::Fs; use node_runtime::NodeRuntime; use session::Session; use settings::SettingsStore; @@ -968,6 +999,7 @@ impl AppState { } let fs = fs::FakeFs::new(cx.background_executor().clone()); + ::set_global(fs.clone(), cx); let languages = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); let clock = Arc::new(clock::FakeSystemClock::new()); let http_client = http_client::FakeHttpClient::with_404_response(); @@ -1156,6 +1188,7 @@ pub struct Workspace { _observe_current_user: Task>, _schedule_serialize_workspace: Option>, _schedule_serialize_ssh_paths: Option>, + _schedule_serialize_worktree_trust: Task<()>, pane_history_timestamp: Arc, bounds: Bounds, pub centered_layout: bool, @@ -1170,6 +1203,7 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + utility_panes: UtilityPaneState, } impl EventEmitter for Workspace {} @@ -1200,6 +1234,41 @@ impl Workspace { window: &mut Window, cx: &mut Context, ) -> Self { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + cx.subscribe(&trusted_worktrees, 
|workspace, worktrees_store, e, cx| { + if let TrustedWorktreesEvent::Trusted(..) = e { + // Do not persist auto trusted worktrees + if !ProjectSettings::get_global(cx).session.trust_all_worktrees { + let new_trusted_worktrees = + worktrees_store.update(cx, |worktrees_store, cx| { + worktrees_store.trusted_paths_for_serialization(cx) + }); + let timeout = cx.background_executor().timer(SERIALIZATION_THROTTLE_TIME); + workspace._schedule_serialize_worktree_trust = + cx.background_spawn(async move { + timeout.await; + persistence::DB + .save_trusted_worktrees(new_trusted_worktrees) + .await + .log_err(); + }); + } + } + }) + .detach(); + + cx.observe_global::(|_, cx| { + if ProjectSettings::get_global(cx).session.trust_all_worktrees { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.auto_trust_all(cx); + }) + } + } + }) + .detach(); + } + cx.subscribe_in(&project, window, move |this, _, event, window, cx| { match event { project::Event::RemoteIdChanged(_) => { @@ -1210,11 +1279,25 @@ impl Workspace { this.collaborator_left(*peer_id, window, cx); } - project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded(_) => { - this.update_window_title(window, cx); - this.serialize_workspace(window, cx); - // This event could be triggered by `AddFolderToProject` or `RemoveFromProject`. - this.update_history(cx); + project::Event::WorktreeUpdatedEntries(worktree_id, _) => { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust(*worktree_id, cx); + }); + } + } + + project::Event::WorktreeRemoved(_) => { + this.update_worktree_data(window, cx); + } + + project::Event::WorktreeAdded(worktree_id) => { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust(*worktree_id, cx); + }); + } + this.update_worktree_data(window, cx); } project::Event::DisconnectedFromHost => { @@ -1311,7 +1394,7 @@ impl Workspace { cx.on_focus_lost(window, |this, window, cx| { let focus_handle = this.focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); }) .detach(); @@ -1335,7 +1418,7 @@ impl Workspace { cx.subscribe_in(¢er_pane, window, Self::handle_pane_event) .detach(); - window.focus(¢er_pane.focus_handle(cx)); + window.focus(¢er_pane.focus_handle(cx), cx); cx.emit(Event::PaneAdded(center_pane.clone())); @@ -1426,6 +1509,15 @@ impl Workspace { && let Ok(display_uuid) = display.uuid() { let window_bounds = window.inner_window_bounds(); + let has_paths = !this.root_paths(cx).is_empty(); + if !has_paths { + cx.background_executor() + .spawn(persistence::write_default_window_bounds( + window_bounds, + display_uuid, + )) + .detach_and_log_err(cx); + } if let Some(database_id) = workspace_id { cx.background_executor() .spawn(DB.set_window_open_status( @@ -1434,6 +1526,13 @@ impl Workspace { display_uuid, )) .detach_and_log_err(cx); + } else { + cx.background_executor() + .spawn(persistence::write_default_window_bounds( + window_bounds, + display_uuid, + )) + .detach_and_log_err(cx); } } this.bounds_save_task_queued.take(); @@ -1457,16 +1556,21 @@ impl Workspace { }), ]; - cx.defer_in(window, |this, window, cx| { + cx.defer_in(window, move |this, window, cx| { this.update_window_title(window, cx); this.show_initial_notifications(cx); }); + + let mut center = 
PaneGroup::new(center_pane.clone()); + center.set_is_center(true); + center.mark_positions(cx); + Workspace { weak_self: weak_handle.clone(), zoomed: None, zoomed_position: None, previous_dock_drag_coordinates: None, - center: PaneGroup::new(center_pane.clone()), + center, panes: vec![center_pane.clone()], panes_by_item: Default::default(), active_pane: center_pane.clone(), @@ -1495,6 +1599,7 @@ impl Workspace { _apply_leader_updates, _schedule_serialize_workspace: None, _schedule_serialize_ssh_paths: None, + _schedule_serialize_worktree_trust: Task::ready(()), leader_updates_tx, _subscriptions: subscriptions, pane_history_timestamp, @@ -1514,6 +1619,7 @@ impl Workspace { scheduled_tasks: Vec::new(), last_open_dock_positions: Vec::new(), removing: false, + utility_panes: UtilityPaneState::default(), } } @@ -1522,6 +1628,7 @@ impl Workspace { app_state: Arc, requesting_window: Option>, env: Option>, + init: Option) + Send>>, cx: &mut App, ) -> Task< anyhow::Result<( @@ -1536,6 +1643,7 @@ impl Workspace { app_state.languages.clone(), app_state.fs.clone(), env, + true, cx, ); @@ -1628,29 +1736,41 @@ impl Workspace { ); workspace.centered_layout = centered_layout; + + // Call init callback to add items before window renders + if let Some(init) = init { + init(&mut workspace, window, cx); + } + workspace }); })?; window } else { let window_bounds_override = window_bounds_env_override(); + let is_empty_workspace = project_paths.is_empty(); let (window_bounds, display) = if let Some(bounds) = window_bounds_override { (Some(WindowBounds::Windowed(bounds)), None) - } else { - let restorable_bounds = serialized_workspace - .as_ref() - .and_then(|workspace| Some((workspace.display?, workspace.window_bounds?))) - .or_else(|| { - let (display, window_bounds) = DB.last_window().log_err()?; - Some((display?, window_bounds?)) - }); - - if let Some((serialized_display, serialized_status)) = restorable_bounds { - (Some(serialized_status.0), Some(serialized_display)) + } else if let Some(workspace) = serialized_workspace.as_ref() { + // Reopening an existing workspace - restore its saved bounds + if let (Some(display), Some(bounds)) = + (workspace.display, workspace.window_bounds.as_ref()) + { + (Some(bounds.0), Some(display)) } else { (None, None) } + } else if is_empty_workspace { + // Empty workspace - try to restore the last known no-project window bounds + if let Some((display, bounds)) = persistence::read_default_window_bounds() { + (Some(bounds), Some(display)) + } else { + (None, None) + } + } else { + // New window - let GPUI's default_bounds() handle cascading + (None, None) }; // Use the serialized workspace to construct the new window @@ -1673,6 +1793,12 @@ impl Workspace { cx, ); workspace.centered_layout = centered_layout; + + // Call init callback to add items before window renders + if let Some(init) = init { + init(&mut workspace, window, cx); + } + workspace }) } @@ -1768,10 +1894,18 @@ impl Workspace { window: &mut Window, cx: &mut Context, ) { + let mut found_in_dock = None; for dock in [&self.left_dock, &self.bottom_dock, &self.right_dock] { - dock.update(cx, |dock, cx| { - dock.remove_panel(panel, window, cx); - }) + let found = dock.update(cx, |dock, cx| dock.remove_panel(panel, window, cx)); + + if found { + found_in_dock = Some(dock.clone()); + } + } + if let Some(found_in_dock) = found_in_dock { + let position = found_in_dock.read(cx).position(); + let slot = utility_slot_for_dock_position(position); + self.clear_utility_pane_if_provider(slot, Entity::entity_id(panel), cx); } } @@ 
-1843,7 +1977,7 @@ impl Workspace { pub fn recent_navigation_history_iter( &self, cx: &App, - ) -> impl Iterator)> { + ) -> impl Iterator)> + use<> { let mut abs_paths_opened: HashMap> = HashMap::default(); let mut history: HashMap, usize)> = HashMap::default(); @@ -1917,6 +2051,12 @@ impl Workspace { .collect() } + pub fn clear_navigation_history(&mut self, _window: &mut Window, cx: &mut Context) { + for pane in &self.panes { + pane.update(cx, |pane, cx| pane.nav_history_mut().clear(cx)); + } + } + fn navigate_history( &mut self, pane: WeakEntity, @@ -1926,7 +2066,7 @@ impl Workspace { ) -> Task> { let to_load = if let Some(pane) = pane.upgrade() { pane.update(cx, |pane, cx| { - window.focus(&pane.focus_handle(cx)); + window.focus(&pane.focus_handle(cx), cx); loop { // Retrieve the weak item handle from the history. let entry = pane.nav_history_mut().pop(mode, cx)?; @@ -2232,7 +2372,7 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, cx); + let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); cx.spawn_in(window, async move |_vh, cx| { let (workspace, _) = task.await?; workspace.update(cx, callback) @@ -2443,6 +2583,12 @@ impl Workspace { .0 .split(' ') .flat_map(|k| Keystroke::parse(k).log_err()) + .map(|k| { + cx.keyboard_mapper() + .map_key_equivalent(k, true) + .inner() + .clone() + }) .collect(); let _ = self.send_keystrokes_impl(keystrokes, window, cx); } @@ -2861,7 +3007,7 @@ impl Workspace { pub fn active_item_as(&self, cx: &App) -> Option> { let item = self.active_item(cx)?; - item.to_any().downcast::().ok() + item.to_any_view().downcast::().ok() } fn active_project_path(&self, cx: &App) -> Option { @@ -3039,7 +3185,7 @@ impl Workspace { } } else { let focus_handle = &active_panel.panel_focus_handle(cx); - window.focus(focus_handle); + window.focus(focus_handle, cx); reveal_dock = true; } } @@ -3051,7 +3197,7 @@ impl Workspace { if focus_center { self.active_pane - .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))) + .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)) } cx.notify(); @@ -3219,7 +3365,7 @@ impl Workspace { if let Some(panel) = panel.as_ref() { if should_focus(&**panel, window, cx) { dock.set_open(true, window, cx); - panel.panel_focus_handle(cx).focus(window); + panel.panel_focus_handle(cx).focus(window, cx); } else { focus_center = true; } @@ -3229,7 +3375,7 @@ impl Workspace { if focus_center { self.active_pane - .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))) + .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)) } result_panel = panel; @@ -3303,7 +3449,7 @@ impl Workspace { if focus_center { self.active_pane - .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))) + .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)) } if self.zoomed_position != dock_to_reveal { @@ -3334,7 +3480,7 @@ impl Workspace { .detach(); self.panes.push(pane.clone()); - window.focus(&pane.focus_handle(cx)); + window.focus(&pane.focus_handle(cx), cx); cx.emit(Event::PaneAdded(pane.clone())); pane @@ -3626,14 +3772,33 @@ impl Workspace { project_item: Entity, activate_pane: bool, focus_item: bool, + keep_old_preview: bool, + allow_new_preview: bool, window: &mut Window, cx: &mut Context, ) -> Entity where T: ProjectItem, { + let old_item_id = pane.read(cx).active_item().map(|item| item.item_id()); + if let Some(item) = 
self.find_project_item(&pane, &project_item, cx) { + if !keep_old_preview + && let Some(old_id) = old_item_id + && old_id != item.item_id() + { + // switching to a different item, so unpreview old active item + pane.update(cx, |pane, _| { + pane.unpreview_item_if_preview(old_id); + }); + } + self.activate_item(&item, activate_pane, focus_item, window, cx); + if !allow_new_preview { + pane.update(cx, |pane, _| { + pane.unpreview_item_if_preview(item.item_id()); + }); + } return item; } @@ -3642,16 +3807,14 @@ impl Workspace { T::for_project_item(self.project().clone(), Some(pane), project_item, window, cx) }) }); - let item_id = item.item_id(); let mut destination_index = None; pane.update(cx, |pane, cx| { - if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation - && let Some(preview_item_id) = pane.preview_item_id() - && preview_item_id != item_id - { - destination_index = pane.close_current_preview_item(window, cx); + if !keep_old_preview && let Some(old_id) = old_item_id { + pane.unpreview_item_if_preview(old_id); + } + if allow_new_preview { + destination_index = pane.replace_preview_item_id(item.item_id(), window, cx); } - pane.set_preview_item_id(Some(item.item_id()), cx) }); self.add_item( @@ -3712,7 +3875,7 @@ impl Workspace { ) { let panes = self.center.panes(); if let Some(pane) = panes.get(action.0).map(|p| (*p).clone()) { - window.focus(&pane.focus_handle(cx)); + window.focus(&pane.focus_handle(cx), cx); } else { self.split_and_clone(self.active_pane.clone(), SplitDirection::Right, window, cx) .detach(); @@ -3739,7 +3902,7 @@ impl Workspace { let new_pane = self.add_pane(window, cx); if self .center - .split(&split_off_pane, &new_pane, direction) + .split(&split_off_pane, &new_pane, direction, cx) .log_err() .is_none() { @@ -3782,7 +3945,7 @@ impl Workspace { if let Some(ix) = panes.iter().position(|pane| **pane == self.active_pane) { let next_ix = (ix + 1) % panes.len(); let next_pane = panes[next_ix].clone(); - window.focus(&next_pane.focus_handle(cx)); + window.focus(&next_pane.focus_handle(cx), cx); } } @@ -3791,7 +3954,7 @@ impl Workspace { if let Some(ix) = panes.iter().position(|pane| **pane == self.active_pane) { let prev_ix = cmp::min(ix.wrapping_sub(1), panes.len() - 1); let prev_pane = panes[prev_ix].clone(); - window.focus(&prev_pane.focus_handle(cx)); + window.focus(&prev_pane.focus_handle(cx), cx); } } @@ -3887,7 +4050,7 @@ impl Workspace { Some(ActivateInDirectionTarget::Pane(pane)) => { let pane = pane.read(cx); if let Some(item) = pane.active_item() { - item.item_focus_handle(cx).focus(window); + item.item_focus_handle(cx).focus(window, cx); } else { log::error!( "Could not find a focus target when in switching focus in {direction} direction for a pane", @@ -3899,7 +4062,7 @@ impl Workspace { window.defer(cx, move |window, cx| { let dock = dock.read(cx); if let Some(panel) = dock.active_panel() { - panel.panel_focus_handle(cx).focus(window); + panel.panel_focus_handle(cx).focus(window, cx); } else { log::error!("Could not find a focus target when in switching focus in {direction} direction for a {:?} dock", dock.position()); } @@ -3924,7 +4087,7 @@ impl Workspace { let new_pane = self.add_pane(window, cx); if self .center - .split(&self.active_pane, &new_pane, action.direction) + .split(&self.active_pane, &new_pane, action.direction, cx) .log_err() .is_none() { @@ -3978,7 +4141,7 @@ impl Workspace { pub fn swap_pane_in_direction(&mut self, direction: SplitDirection, cx: &mut Context) { if let Some(to) = self.find_pane_in_direction(direction, 
cx) { - self.center.swap(&self.active_pane, &to); + self.center.swap(&self.active_pane, &to, cx); cx.notify(); } } @@ -3986,7 +4149,7 @@ impl Workspace { pub fn move_pane_to_border(&mut self, direction: SplitDirection, cx: &mut Context) { if self .center - .move_to_border(&self.active_pane, direction) + .move_to_border(&self.active_pane, direction, cx) .unwrap() { cx.notify(); @@ -4016,13 +4179,13 @@ impl Workspace { } } else { self.center - .resize(&self.active_pane, axis, amount, &self.bounds); + .resize(&self.active_pane, axis, amount, &self.bounds, cx); } cx.notify(); } pub fn reset_pane_sizes(&mut self, cx: &mut Context) { - self.center.reset_pane_sizes(); + self.center.reset_pane_sizes(cx); cx.notify(); } @@ -4208,7 +4371,7 @@ impl Workspace { ) -> Entity { let new_pane = self.add_pane(window, cx); self.center - .split(&pane_to_split, &new_pane, split_direction) + .split(&pane_to_split, &new_pane, split_direction, cx) .unwrap(); cx.notify(); new_pane @@ -4228,7 +4391,7 @@ impl Workspace { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx) }); - self.center.split(&pane, &new_pane, direction).unwrap(); + self.center.split(&pane, &new_pane, direction, cx).unwrap(); cx.notify(); } @@ -4253,7 +4416,7 @@ impl Workspace { new_pane.update(cx, |pane, cx| { pane.add_item(clone, true, true, None, window, cx) }); - this.center.split(&pane, &new_pane, direction).unwrap(); + this.center.split(&pane, &new_pane, direction, cx).unwrap(); cx.notify(); new_pane }) @@ -4300,7 +4463,7 @@ impl Workspace { window: &mut Window, cx: &mut Context, ) { - if self.center.remove(&pane).unwrap() { + if self.center.remove(&pane, cx).unwrap() { self.force_remove_pane(&pane, &focus_on, window, cx); self.unfollow_in_pane(&pane, window, cx); self.last_leaders_by_pane.remove(&pane.downgrade()); @@ -4519,7 +4682,7 @@ impl Workspace { // if you're already following, find the right pane and focus it. 
if let Some(follower_state) = self.follower_states.get(&leader_id) { - window.focus(&follower_state.pane().focus_handle(cx)); + window.focus(&follower_state.pane().focus_handle(cx), cx); return; } @@ -5331,12 +5494,12 @@ impl Workspace { ) { self.panes.retain(|p| p != pane); if let Some(focus_on) = focus_on { - focus_on.update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))); + focus_on.update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)); } else if self.active_pane() == pane { self.panes .last() .unwrap() - .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))); + .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)); } if self.last_active_center_pane == Some(pane.downgrade()) { self.last_active_center_pane = None; @@ -5652,6 +5815,9 @@ impl Workspace { // Swap workspace center group workspace.center = PaneGroup::with_root(center_group); + workspace.center.set_is_center(true); + workspace.center.mark_positions(cx); + if let Some(active_pane) = active_pane { workspace.set_active_pane(&active_pane, window, cx); cx.focus_self(window); @@ -5815,6 +5981,21 @@ impl Workspace { .on_action(cx.listener(|workspace, _: &SwapPaneDown, _, cx| { workspace.swap_pane_in_direction(SplitDirection::Down, cx) })) + .on_action(cx.listener(|workspace, _: &SwapPaneAdjacent, window, cx| { + const DIRECTION_PRIORITY: [SplitDirection; 4] = [ + SplitDirection::Down, + SplitDirection::Up, + SplitDirection::Right, + SplitDirection::Left, + ]; + for dir in DIRECTION_PRIORITY { + if workspace.find_pane_in_direction(dir, cx).is_some() { + workspace.swap_pane_in_direction(dir, cx); + workspace.activate_pane_in_direction(dir.opposite(), window, cx); + break; + } + } + })) .on_action(cx.listener(|workspace, _: &MovePaneLeft, _, cx| { workspace.move_pane_to_border(SplitDirection::Left, cx) })) @@ -5858,6 +6039,11 @@ impl Workspace { workspace.clear_all_notifications(cx); }, )) + .on_action(cx.listener( + |workspace: &mut Workspace, _: &ClearNavigationHistory, window, cx| { + workspace.clear_navigation_history(window, cx); + }, + )) .on_action(cx.listener( |workspace: &mut Workspace, _: &SuppressNotification, _, cx| { if let Some((notification_id, _)) = workspace.notifications.pop() { @@ -5865,6 +6051,27 @@ impl Workspace { } }, )) + .on_action(cx.listener( + |workspace: &mut Workspace, _: &ToggleWorktreeSecurity, window, cx| { + workspace.show_worktree_trust_security_modal(true, window, cx); + }, + )) + .on_action( + cx.listener(|_: &mut Workspace, _: &ClearTrustedWorktrees, _, cx| { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + trusted_worktrees.update(cx, |trusted_worktrees, _| { + trusted_worktrees.clear_trusted_paths() + }); + let clear_task = persistence::DB.clear_trusted_worktrees(); + cx.spawn(async move |_, cx| { + if clear_task.await.log_err().is_some() { + cx.update(|cx| reload(cx)).ok(); + } + }) + .detach(); + } + }), + ) .on_action(cx.listener( |workspace: &mut Workspace, _: &ReopenClosedItem, window, cx| { workspace.reopen_closed_item(window, cx).detach(); @@ -5937,9 +6144,96 @@ impl Workspace { }, )) .on_action(cx.listener(Workspace::toggle_centered_layout)) + .on_action(cx.listener( + |workspace: &mut Workspace, _action: &pane::ActivateNextItem, window, cx| { + if let Some(active_dock) = workspace.active_dock(window, cx) { + let dock = active_dock.read(cx); + if let Some(active_panel) = dock.active_panel() { + if active_panel.pane(cx).is_none() { + let mut recent_pane: Option> = None; + let mut recent_timestamp = 0; + for pane_handle in 
workspace.panes() { + let pane = pane_handle.read(cx); + for entry in pane.activation_history() { + if entry.timestamp > recent_timestamp { + recent_timestamp = entry.timestamp; + recent_pane = Some(pane_handle.clone()); + } + } + } + + if let Some(pane) = recent_pane { + pane.update(cx, |pane, cx| { + let current_index = pane.active_item_index(); + let items_len = pane.items_len(); + if items_len > 0 { + let next_index = if current_index + 1 < items_len { + current_index + 1 + } else { + 0 + }; + pane.activate_item( + next_index, false, false, window, cx, + ); + } + }); + return; + } + } + } + } + cx.propagate(); + }, + )) + .on_action(cx.listener( + |workspace: &mut Workspace, _action: &pane::ActivatePreviousItem, window, cx| { + if let Some(active_dock) = workspace.active_dock(window, cx) { + let dock = active_dock.read(cx); + if let Some(active_panel) = dock.active_panel() { + if active_panel.pane(cx).is_none() { + let mut recent_pane: Option> = None; + let mut recent_timestamp = 0; + for pane_handle in workspace.panes() { + let pane = pane_handle.read(cx); + for entry in pane.activation_history() { + if entry.timestamp > recent_timestamp { + recent_timestamp = entry.timestamp; + recent_pane = Some(pane_handle.clone()); + } + } + } + + if let Some(pane) = recent_pane { + pane.update(cx, |pane, cx| { + let current_index = pane.active_item_index(); + let items_len = pane.items_len(); + if items_len > 0 { + let prev_index = if current_index > 0 { + current_index - 1 + } else { + items_len.saturating_sub(1) + }; + pane.activate_item( + prev_index, false, false, window, cx, + ); + } + }); + return; + } + } + } + } + cx.propagate(); + }, + )) .on_action(cx.listener(Workspace::cancel)) } + #[cfg(any(test, feature = "test-support"))] + pub fn set_random_database_id(&mut self) { + self.database_id = Some(WorkspaceId(Uuid::new_v4().as_u64_pair().0 as i64)); + } + #[cfg(any(test, feature = "test-support"))] pub fn test_new(project: Entity, window: &mut Window, cx: &mut Context) -> Self { use node_runtime::NodeRuntime; @@ -5963,7 +6257,7 @@ impl Workspace { let workspace = Self::new(Default::default(), project, app_state, window, cx); workspace .active_pane - .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx))); + .update(cx, |pane, cx| window.focus(&pane.focus_handle(cx), cx)); workspace } @@ -6170,6 +6464,7 @@ impl Workspace { left_dock.resize_active_panel(Some(size), window, cx); } }); + self.clamp_utility_pane_widths(window, cx); } fn resize_right_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { @@ -6192,6 +6487,7 @@ impl Workspace { right_dock.resize_active_panel(Some(size), window, cx); } }); + self.clamp_utility_pane_widths(window, cx); } fn resize_bottom_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { @@ -6206,6 +6502,42 @@ impl Workspace { bottom_dock.resize_active_panel(Some(size), window, cx); } }); + self.clamp_utility_pane_widths(window, cx); + } + + fn max_utility_pane_width(&self, window: &Window, cx: &App) -> Pixels { + let left_dock_width = self + .left_dock + .read(cx) + .active_panel_size(window, cx) + .unwrap_or(px(0.0)); + let right_dock_width = self + .right_dock + .read(cx) + .active_panel_size(window, cx) + .unwrap_or(px(0.0)); + let center_pane_width = self.bounds.size.width - left_dock_width - right_dock_width; + center_pane_width - px(10.0) + } + + fn clamp_utility_pane_widths(&mut self, window: &mut Window, cx: &mut App) { + let max_width = self.max_utility_pane_width(window, cx); + + // Clamp left slot utility pane 
if it exists + if let Some(handle) = self.utility_pane(UtilityPaneSlot::Left) { + let current_width = handle.width(cx); + if current_width > max_width { + handle.set_width(Some(max_width.max(UTILITY_PANE_MIN_WIDTH)), cx); + } + } + + // Clamp right slot utility pane if it exists + if let Some(handle) = self.utility_pane(UtilityPaneSlot::Right) { + let current_width = handle.width(cx); + if current_width > max_width { + handle.set_width(Some(max_width.max(UTILITY_PANE_MIN_WIDTH)), cx); + } + } } fn toggle_edit_predictions_all_files( @@ -6220,6 +6552,48 @@ impl Workspace { file.project.all_languages.defaults.show_edit_predictions = Some(!show_edit_predictions) }); } + + pub fn show_worktree_trust_security_modal( + &mut self, + toggle: bool, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(security_modal) = self.active_modal::(cx) { + if toggle { + security_modal.update(cx, |security_modal, cx| { + security_modal.dismiss(cx); + }) + } else { + security_modal.update(cx, |security_modal, cx| { + security_modal.refresh_restricted_paths(cx); + }); + } + } else { + let has_restricted_worktrees = TrustedWorktrees::try_get_global(cx) + .map(|trusted_worktrees| { + trusted_worktrees + .read(cx) + .has_restricted_worktrees(&self.project().read(cx).worktree_store(), cx) + }) + .unwrap_or(false); + if has_restricted_worktrees { + let project = self.project().read(cx); + let remote_host = project.remote_connection_options(cx); + let worktree_store = project.worktree_store().downgrade(); + self.toggle_modal(window, cx, |_, cx| { + SecurityModal::new(worktree_store, remote_host, cx) + }); + } + } + } + + fn update_worktree_data(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) { + self.update_window_title(window, cx); + self.serialize_workspace(window, cx); + // This event could be triggered by `AddFolderToProject` or `RemoveFromProject`. 
+ self.update_history(cx); + } } fn leader_border_for_pane( @@ -6673,6 +7047,34 @@ impl Render for Workspace { } }, )) + .on_drag_move(cx.listener( + move |workspace, + e: &DragMoveEvent, + window, + cx| { + let slot = e.drag(cx).0; + match slot { + UtilityPaneSlot::Left => { + let left_dock_width = workspace.left_dock.read(cx) + .active_panel_size(window, cx) + .unwrap_or(gpui::px(0.0)); + let new_width = e.event.position.x + - workspace.bounds.left() + - left_dock_width; + workspace.resize_utility_pane(slot, new_width, window, cx); + } + UtilityPaneSlot::Right => { + let right_dock_width = workspace.right_dock.read(cx) + .active_panel_size(window, cx) + .unwrap_or(gpui::px(0.0)); + let new_width = workspace.bounds.right() + - e.event.position.x + - right_dock_width; + workspace.resize_utility_pane(slot, new_width, window, cx); + } + } + }, + )) }) .child({ match bottom_dock_layout { @@ -6692,6 +7094,15 @@ impl Render for Workspace { window, cx, )) + .when(cx.has_flag::(), |this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Left), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Left, pane.box_clone(), cx) + ) + }) + }) + }) .child( div() .flex() @@ -6733,6 +7144,15 @@ impl Render for Workspace { ), ), ) + .when(cx.has_flag::(), |this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Right), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Right, pane.box_clone(), cx) + ) + }) + }) + }) .children(self.render_dock( DockPosition::Right, &self.right_dock, @@ -6763,6 +7183,15 @@ impl Render for Workspace { .flex_row() .flex_1() .children(self.render_dock(DockPosition::Left, &self.left_dock, window, cx)) + .when(cx.has_flag::(), |this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Left), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Left, pane.box_clone(), cx) + ) + }) + }) + }) .child( div() .flex() @@ -6790,6 +7219,13 @@ impl Render for Workspace { .when_some(paddings.1, |this, p| this.child(p.border_l_1())), ) ) + .when_some(self.utility_pane(UtilityPaneSlot::Right), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Right, pane.box_clone(), cx) + ) + }) + }) ) .child( div() @@ -6814,6 +7250,15 @@ impl Render for Workspace { window, cx, )) + .when(cx.has_flag::(), |this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Left), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Left, pane.box_clone(), cx) + ) + }) + }) + }) .child( div() .flex() @@ -6852,6 +7297,15 @@ impl Render for Workspace { .when_some(paddings.1, |this, p| this.child(p.border_l_1())), ) ) + .when(cx.has_flag::(), |this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Right), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Right, pane.box_clone(), cx) + ) + }) + }) + }) .children(self.render_dock(DockPosition::Right, &self.right_dock, window, cx)) ) .child( @@ -6871,6 +7325,13 @@ impl Render for Workspace { window, cx, )) + .when_some(self.utility_pane(UtilityPaneSlot::Left), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Left, pane.box_clone(), cx) + ) + }) + }) .child( div() .flex() @@ -6908,6 +7369,15 @@ impl Render for Workspace { cx, )), ) + .when(cx.has_flag::(), 
|this| { + this.when_some(self.utility_pane(UtilityPaneSlot::Right), |this, pane| { + this.when(pane.expanded(cx), |this| { + this.child( + UtilityPaneFrame::new(UtilityPaneSlot::Right, pane.box_clone(), cx) + ) + }) + }) + }) .children(self.render_dock( DockPosition::Right, &self.right_dock, @@ -7124,7 +7594,9 @@ actions!( /// Shares the current project with collaborators. ShareProject, /// Shares your screen with collaborators. - ScreenShare + ScreenShare, + /// Copies the current room name and session id for debugging purposes. + CopyRoomId, ] ); actions!( @@ -7293,14 +7765,9 @@ pub fn join_channel( ) -> Task> { let active_call = ActiveCall::global(cx); cx.spawn(async move |cx| { - let result = join_channel_internal( - channel_id, - &app_state, - requesting_window, - &active_call, - cx, - ) - .await; + let result = + join_channel_internal(channel_id, &app_state, requesting_window, &active_call, cx) + .await; // join channel succeeded, and opened a window if matches!(result, Ok(true)) { @@ -7308,20 +7775,27 @@ pub fn join_channel( } // find an existing workspace to focus and show call controls - let mut active_window = - requesting_window.or_else(|| activate_any_workspace_window( cx)); + let mut active_window = requesting_window.or_else(|| activate_any_workspace_window(cx)); if active_window.is_none() { // no open workspaces, make one to show the error in (blergh) let (window_handle, _) = cx .update(|cx| { - Workspace::new_local(vec![], app_state.clone(), requesting_window, None, cx) + Workspace::new_local( + vec![], + app_state.clone(), + requesting_window, + None, + None, + cx, + ) })? .await?; if result.is_ok() { cx.update(|cx| { cx.dispatch_action(&OpenChannelNotes); - }).log_err(); + }) + .log_err(); } active_window = Some(window_handle); @@ -7333,19 +7807,25 @@ pub fn join_channel( active_window .update(cx, |_, window, cx| { let detail: SharedString = match err.error_code() { - ErrorCode::SignedOut => { - "Please sign in to continue.".into() - } - ErrorCode::UpgradeRequired => { - "Your are running an unsupported version of Zed. Please update to continue.".into() - } - ErrorCode::NoSuchChannel => { - "No matching channel was found. Please check the link and try again.".into() - } - ErrorCode::Forbidden => { - "This channel is private, and you do not have access. Please ask someone to add you and try again.".into() + ErrorCode::SignedOut => "Please sign in to continue.".into(), + ErrorCode::UpgradeRequired => concat!( + "Your are running an unsupported version of Zed. ", + "Please update to continue." + ) + .into(), + ErrorCode::NoSuchChannel => concat!( + "No matching channel was found. ", + "Please check the link and try again." + ) + .into(), + ErrorCode::Forbidden => concat!( + "This channel is private, and you do not have access. ", + "Please ask someone to add you and try again." + ) + .into(), + ErrorCode::Disconnected => { + "Please check your internet connection and try again.".into() } - ErrorCode::Disconnected => "Please check your internet connection and try again.".into(), _ => format!("{}\n\nPlease try again.", err).into(), }; window.prompt( @@ -7353,7 +7833,8 @@ pub fn join_channel( "Failed to join channel", Some(&detail), &["Ok"], - cx) + cx, + ) })? .await .ok(); @@ -7372,7 +7853,7 @@ pub async fn get_any_active_workspace( // find an existing workspace to focus and show call controls let active_window = activate_any_workspace_window(&mut cx); if active_window.is_none() { - cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, cx))? 
+ cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx))? .await?; } activate_any_workspace_window(&mut cx).context("could not open zed") @@ -7539,6 +8020,7 @@ pub fn open_paths( app_state.clone(), open_options.replace_window, open_options.env, + None, cx, ) })? @@ -7583,14 +8065,17 @@ pub fn open_new( cx: &mut App, init: impl FnOnce(&mut Workspace, &mut Window, &mut Context) + 'static + Send, ) -> Task> { - let task = Workspace::new_local(Vec::new(), app_state, None, open_options.env, cx); - cx.spawn(async move |cx| { - let (workspace, opened_paths) = task.await?; - workspace.update(cx, |workspace, window, cx| { - if opened_paths.is_empty() { - init(workspace, window, cx) - } - })?; + let task = Workspace::new_local( + Vec::new(), + app_state, + None, + open_options.env, + Some(Box::new(init)), + cx, + ); + cx.spawn(async move |_cx| { + let (_workspace, _opened_paths) = task.await?; + // Init callback is called synchronously during workspace creation Ok(()) }) } @@ -7643,7 +8128,7 @@ pub fn open_remote_project_with_new_connection( ) -> Task>>>> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = - serialize_remote_project(remote_connection.connection_options(), paths.clone(), cx) + deserialize_remote_project(remote_connection.connection_options(), paths.clone(), cx) .await?; let session = match cx @@ -7670,6 +8155,7 @@ pub fn open_remote_project_with_new_connection( app_state.user_store.clone(), app_state.languages.clone(), app_state.fs.clone(), + true, cx, ) })?; @@ -7697,7 +8183,7 @@ pub fn open_remote_project_with_existing_connection( ) -> Task>>>> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = - serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; + deserialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; open_remote_project_inner( project, @@ -7799,7 +8285,7 @@ async fn open_remote_project_inner( Ok(items.into_iter().map(|item| item?.ok()).collect()) } -fn serialize_remote_project( +fn deserialize_remote_project( connection_options: RemoteConnectionOptions, paths: Vec, cx: &AsyncApp, @@ -8245,7 +8731,7 @@ fn move_all_items( // This automatically removes duplicate items in the pane to_pane.update(cx, |destination, cx| { destination.add_item(item_handle, true, true, None, window, cx); - window.focus(&destination.focus_handle(cx)) + window.focus(&destination.focus_handle(cx), cx) }); } } @@ -8289,7 +8775,7 @@ pub fn move_item( cx, ); if activate { - window.focus(&destination.focus_handle(cx)) + window.focus(&destination.focus_handle(cx), cx) } }); } @@ -8391,14 +8877,13 @@ pub fn remote_workspace_position_from_db( } else { let restorable_bounds = serialized_workspace .as_ref() - .and_then(|workspace| Some((workspace.display?, workspace.window_bounds?))) - .or_else(|| { - let (display, window_bounds) = DB.last_window().log_err()?; - Some((display?, window_bounds?)) - }); + .and_then(|workspace| { + Some((workspace.display?, workspace.window_bounds.map(|b| b.0)?)) + }) + .or_else(|| persistence::read_default_window_bounds()); - if let Some((serialized_display, serialized_status)) = restorable_bounds { - (Some(serialized_status.0), Some(serialized_display)) + if let Some((serialized_display, serialized_bounds)) = restorable_bounds { + (Some(serialized_bounds), Some(serialized_display)) } else { (None, None) } @@ -9161,7 +9646,7 @@ mod tests { cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let panel = workspace.update_in(cx, |workspace, 
window, cx| { - let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, cx)); + let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 100, cx)); workspace.add_panel(panel.clone(), window, cx); workspace @@ -9301,6 +9786,105 @@ mod tests { }); } + #[gpui::test] + async fn test_pane_zoom_in_out(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + let pane = workspace.update_in(cx, |workspace, _window, _cx| { + workspace.active_pane().clone() + }); + + // Add an item to the pane so it can be zoomed + workspace.update_in(cx, |workspace, window, cx| { + let item = cx.new(TestItem::new); + workspace.add_item(pane.clone(), Box::new(item), None, true, true, window, cx); + }); + + // Initially not zoomed + workspace.update_in(cx, |workspace, _window, cx| { + assert!(!pane.read(cx).is_zoomed(), "Pane starts unzoomed"); + assert!( + workspace.zoomed.is_none(), + "Workspace should track no zoomed pane" + ); + assert!(pane.read(cx).items_len() > 0, "Pane should have items"); + }); + + // Zoom In + pane.update_in(cx, |pane, window, cx| { + pane.zoom_in(&crate::ZoomIn, window, cx); + }); + + workspace.update_in(cx, |workspace, window, cx| { + assert!( + pane.read(cx).is_zoomed(), + "Pane should be zoomed after ZoomIn" + ); + assert!( + workspace.zoomed.is_some(), + "Workspace should track the zoomed pane" + ); + assert!( + pane.read(cx).focus_handle(cx).contains_focused(window, cx), + "ZoomIn should focus the pane" + ); + }); + + // Zoom In again is a no-op + pane.update_in(cx, |pane, window, cx| { + pane.zoom_in(&crate::ZoomIn, window, cx); + }); + + workspace.update_in(cx, |workspace, window, cx| { + assert!(pane.read(cx).is_zoomed(), "Second ZoomIn keeps pane zoomed"); + assert!( + workspace.zoomed.is_some(), + "Workspace still tracks zoomed pane" + ); + assert!( + pane.read(cx).focus_handle(cx).contains_focused(window, cx), + "Pane remains focused after repeated ZoomIn" + ); + }); + + // Zoom Out + pane.update_in(cx, |pane, window, cx| { + pane.zoom_out(&crate::ZoomOut, window, cx); + }); + + workspace.update_in(cx, |workspace, _window, cx| { + assert!( + !pane.read(cx).is_zoomed(), + "Pane should unzoom after ZoomOut" + ); + assert!( + workspace.zoomed.is_none(), + "Workspace clears zoom tracking after ZoomOut" + ); + }); + + // Zoom Out again is a no-op + pane.update_in(cx, |pane, window, cx| { + pane.zoom_out(&crate::ZoomOut, window, cx); + }); + + workspace.update_in(cx, |workspace, _window, cx| { + assert!( + !pane.read(cx).is_zoomed(), + "Second ZoomOut keeps pane unzoomed" + ); + assert!( + workspace.zoomed.is_none(), + "Workspace remains without zoomed pane" + ); + }); + } + #[gpui::test] async fn test_toggle_all_docks(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -9394,10 +9978,10 @@ mod tests { // Open two docks (left and right) with one panel each let (left_panel, right_panel) = workspace.update_in(cx, |workspace, window, cx| { - let left_panel = cx.new(|cx| TestPanel::new(DockPosition::Left, cx)); + let left_panel = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); workspace.add_panel(left_panel.clone(), window, cx); - let right_panel = cx.new(|cx| TestPanel::new(DockPosition::Right, cx)); + let right_panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 101, cx)); workspace.add_panel(right_panel.clone(), window, cx); workspace.toggle_dock(DockPosition::Left, window, cx); @@ 
-9825,10 +10409,10 @@ mod tests { cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let (panel_1, panel_2) = workspace.update_in(cx, |workspace, window, cx| { - let panel_1 = cx.new(|cx| TestPanel::new(DockPosition::Left, cx)); + let panel_1 = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); workspace.add_panel(panel_1.clone(), window, cx); workspace.toggle_dock(DockPosition::Left, window, cx); - let panel_2 = cx.new(|cx| TestPanel::new(DockPosition::Right, cx)); + let panel_2 = cx.new(|cx| TestPanel::new(DockPosition::Right, 101, cx)); workspace.add_panel(panel_2.clone(), window, cx); workspace.toggle_dock(DockPosition::Right, window, cx); @@ -10735,7 +11319,7 @@ mod tests { // Add a new panel to the right dock, opening the dock and setting the // focus to the new panel. let panel = workspace.update_in(cx, |workspace, window, cx| { - let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, cx)); + let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 100, cx)); workspace.add_panel(panel.clone(), window, cx); workspace @@ -11199,7 +11783,7 @@ mod tests { // Now we can check if the handle we got back errored or not assert_eq!( - handle.to_any().entity_type(), + handle.to_any_view().entity_type(), TypeId::of::() ); @@ -11212,7 +11796,7 @@ mod tests { .unwrap(); assert_eq!( - handle.to_any().entity_type(), + handle.to_any_view().entity_type(), TypeId::of::() ); @@ -11261,7 +11845,7 @@ mod tests { // This _must_ be the second item registered assert_eq!( - handle.to_any().entity_type(), + handle.to_any_view().entity_type(), TypeId::of::() ); diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 24958df7c6d5d36fee243022d700ccf56a570a19..4ce0394fe5fdc74754c1147138cb33c67e076d88 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -31,6 +31,7 @@ pub struct WorkspaceSettings { pub close_on_file_delete: bool, pub use_system_window_tabs: bool, pub zoomed_padding: bool, + pub window_decorations: settings::WindowDecorations, } #[derive(Copy, Clone, PartialEq, Debug, Default)] @@ -105,6 +106,7 @@ impl Settings for WorkspaceSettings { close_on_file_delete: workspace.close_on_file_delete.unwrap(), use_system_window_tabs: workspace.use_system_window_tabs.unwrap(), zoomed_padding: workspace.zoomed_padding.unwrap(), + window_decorations: workspace.window_decorations.unwrap(), } } } diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 6d132fbd2cb8c7a1282bffcea6577260a15c4572..e7d3ac34e1886bd76e0a0f5d23ea981b6626909a 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -25,8 +25,10 @@ test-support = [ [dependencies] anyhow.workspace = true async-lock.workspace = true +chardetng.workspace = true clock.workspace = true collections.workspace = true +encoding_rs.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true diff --git a/crates/worktree/src/ignore.rs b/crates/worktree/src/ignore.rs index 17c362e2d7f78384fe3b9b444353d302c4dac4c5..87487c36df6dc4eca3da43eaab95f83847ba5d1f 100644 --- a/crates/worktree/src/ignore.rs +++ b/crates/worktree/src/ignore.rs @@ -13,6 +13,10 @@ pub enum IgnoreStackEntry { Global { ignore: Arc, }, + RepoExclude { + ignore: Arc, + parent: Arc, + }, Some { abs_base_path: Arc, ignore: Arc, @@ -21,6 +25,12 @@ pub enum IgnoreStackEntry { All, } +#[derive(Debug)] +pub enum IgnoreKind { + Gitignore(Arc), + RepoExclude, +} + impl IgnoreStack { pub fn none() -> Self { Self 
{ @@ -43,13 +53,19 @@ impl IgnoreStack { } } - pub fn append(self, abs_base_path: Arc, ignore: Arc) -> Self { + pub fn append(self, kind: IgnoreKind, ignore: Arc) -> Self { let top = match self.top.as_ref() { IgnoreStackEntry::All => self.top.clone(), - _ => Arc::new(IgnoreStackEntry::Some { - abs_base_path, - ignore, - parent: self.top.clone(), + _ => Arc::new(match kind { + IgnoreKind::Gitignore(abs_base_path) => IgnoreStackEntry::Some { + abs_base_path, + ignore, + parent: self.top.clone(), + }, + IgnoreKind::RepoExclude => IgnoreStackEntry::RepoExclude { + ignore, + parent: self.top.clone(), + }, }), }; Self { @@ -84,6 +100,17 @@ impl IgnoreStack { ignore::Match::Whitelist(_) => false, } } + IgnoreStackEntry::RepoExclude { ignore, parent } => { + match ignore.matched(abs_path, is_dir) { + ignore::Match::None => IgnoreStack { + repo_root: self.repo_root.clone(), + top: parent.clone(), + } + .is_abs_path_ignored(abs_path, is_dir), + ignore::Match::Ignore(_) => true, + ignore::Match::Whitelist(_) => false, + } + } IgnoreStackEntry::Some { abs_base_path, ignore, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 69fee07583a33106689c463732fe6defbdcfbb40..7145bccd514fbb5d6093efda765a826162c91260 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -5,8 +5,10 @@ mod worktree_tests; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{Context as _, Result, anyhow}; +use chardetng::EncodingDetector; use clock::ReplicaId; use collections::{HashMap, HashSet, VecDeque}; +use encoding_rs::Encoding; use fs::{Fs, MTime, PathEvent, RemoveOptions, Watcher, copy_recursive, read_dir_items}; use futures::{ FutureExt as _, Stream, StreamExt, @@ -14,15 +16,17 @@ use futures::{ mpsc::{self, UnboundedSender}, oneshot, }, - select_biased, + select_biased, stream, task::Poll, }; use fuzzy::CharBag; use git::{ - COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, INDEX_LOCK, LFS_DIR, status::GitSummary, + COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, INDEX_LOCK, LFS_DIR, REPO_EXCLUDE, + status::GitSummary, }; use gpui::{ - App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, + App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Priority, + Task, }; use ignore::IgnoreStack; use language::DiskState; @@ -52,7 +56,7 @@ use std::{ fmt, future::Future, mem::{self}, - ops::{Deref, DerefMut}, + ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, pin::Pin, sync::{ @@ -70,6 +74,8 @@ use util::{ }; pub use worktree_settings::WorktreeSettings; +use crate::ignore::IgnoreKind; + pub const FS_WATCH_LATENCY: Duration = Duration::from_millis(100); /// A set of local or remote files that are being opened as part of a project. @@ -97,9 +103,12 @@ pub enum CreatedEntry { Excluded { abs_path: PathBuf }, } +#[derive(Debug)] pub struct LoadedFile { pub file: Arc, pub text: String, + pub encoding: &'static Encoding, + pub has_bom: bool, } pub struct LoadedBinaryFile { @@ -129,6 +138,7 @@ pub struct LocalWorktree { next_entry_id: Arc, settings: WorktreeSettings, share_private_files: bool, + scanning_enabled: bool, } pub struct PathPrefixScanRequest { @@ -230,6 +240,9 @@ impl Default for WorkDirectory { pub struct LocalSnapshot { snapshot: Snapshot, global_gitignore: Option>, + /// Exclude files for all git repositories in the worktree, indexed by their absolute path. + /// The boolean indicates whether the gitignore needs to be updated. 
+ repo_exclude_by_work_dir_abs_path: HashMap, (Arc, bool)>, /// All of the gitignore files in the worktree, indexed by their absolute path. /// The boolean indicates whether the gitignore needs to be updated. ignores_by_parent_abs_path: HashMap, (Arc, bool)>, @@ -356,6 +369,7 @@ impl Worktree { visible: bool, fs: Arc, next_entry_id: Arc, + scanning_enabled: bool, cx: &mut AsyncApp, ) -> Result> { let abs_path = path.into(); @@ -389,6 +403,7 @@ impl Worktree { let mut snapshot = LocalSnapshot { ignores_by_parent_abs_path: Default::default(), global_gitignore: Default::default(), + repo_exclude_by_work_dir_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot::new( cx.entity_id().as_u64(), @@ -428,7 +443,7 @@ impl Worktree { let mut entry = Entry::new( RelPath::empty().into(), &metadata, - &next_entry_id, + ProjectEntryId::new(&next_entry_id), snapshot.root_char_bag, None, ); @@ -459,6 +474,7 @@ impl Worktree { fs_case_sensitive, visible, settings, + scanning_enabled, }; worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx); Worktree::Local(worktree) @@ -729,10 +745,14 @@ impl Worktree { path: Arc, text: Rope, line_ending: LineEnding, + encoding: &'static Encoding, + has_bom: bool, cx: &Context, ) -> Task>> { match self { - Worktree::Local(this) => this.write_file(path, text, line_ending, cx), + Worktree::Local(this) => { + this.write_file(path, text, line_ending, encoding, has_bom, cx) + } Worktree::Remote(_) => { Task::ready(Err(anyhow!("remote worktree can't yet write files"))) } @@ -999,7 +1019,7 @@ impl Worktree { }; if worktree_relative_path.components().next().is_some() { - full_path_string.push_str(self.path_style.separator()); + full_path_string.push_str(self.path_style.primary_separator()); full_path_string.push_str(&worktree_relative_path.display(self.path_style)); } @@ -1049,13 +1069,18 @@ impl LocalWorktree { let share_private_files = self.share_private_files; let next_entry_id = self.next_entry_id.clone(); let fs = self.fs.clone(); + let scanning_enabled = self.scanning_enabled; let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_spawn({ let abs_path = snapshot.abs_path.as_path().to_path_buf(); let background = cx.background_executor().clone(); async move { - let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await; + let (events, watcher) = if scanning_enabled { + fs.watch(&abs_path, FS_WATCH_LATENCY).await + } else { + (Box::pin(stream::pending()) as _, Arc::new(NullWatcher) as _) + }; let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| { log::error!("Failed to determine whether filesystem is case sensitive: {e:#}"); true @@ -1080,6 +1105,7 @@ impl LocalWorktree { }), phase: BackgroundScannerPhase::InitialScan, share_private_files, + scanning_enabled, settings, watcher, }; @@ -1333,7 +1359,9 @@ impl LocalWorktree { anyhow::bail!("File is too large to load"); } } - let text = fs.load(&abs_path).await?; + + let content = fs.load_bytes(&abs_path).await?; + let (text, encoding, has_bom) = decode_byte(content); let worktree = this.upgrade().context("worktree was dropped")?; let file = match entry.await? 
{ @@ -1361,7 +1389,12 @@ impl LocalWorktree { } }; - Ok(LoadedFile { file, text }) + Ok(LoadedFile { + file, + text, + encoding, + has_bom, + }) }) } @@ -1444,6 +1477,8 @@ impl LocalWorktree { path: Arc, text: Rope, line_ending: LineEnding, + encoding: &'static Encoding, + has_bom: bool, cx: &Context, ) -> Task>> { let fs = self.fs.clone(); @@ -1453,7 +1488,49 @@ impl LocalWorktree { let write = cx.background_spawn({ let fs = fs.clone(); let abs_path = abs_path.clone(); - async move { fs.save(&abs_path, &text, line_ending).await } + async move { + let bom_bytes = if has_bom { + if encoding == encoding_rs::UTF_16LE { + vec![0xFF, 0xFE] + } else if encoding == encoding_rs::UTF_16BE { + vec![0xFE, 0xFF] + } else if encoding == encoding_rs::UTF_8 { + vec![0xEF, 0xBB, 0xBF] + } else { + vec![] + } + } else { + vec![] + }; + + // For UTF-8, use the optimized `fs.save` which writes Rope chunks directly to disk + // without allocating a contiguous string. + if encoding == encoding_rs::UTF_8 && !has_bom { + return fs.save(&abs_path, &text, line_ending).await; + } + // For legacy encodings (e.g. Shift-JIS), we fall back to converting the entire Rope + // to a String/Bytes in memory before writing. + // + // Note: This is inefficient for very large files compared to the streaming approach above, + // but supporting streaming writes for arbitrary encodings would require a significant + // refactor of the `fs` crate to expose a Writer interface. + let text_string = text.to_string(); + let normalized_text = match line_ending { + LineEnding::Unix => text_string, + LineEnding::Windows => text_string.replace('\n', "\r\n"), + }; + + let (cow, _, _) = encoding.encode(&normalized_text); + let bytes = if !bom_bytes.is_empty() { + let mut bytes = bom_bytes; + bytes.extend_from_slice(&cow); + bytes.into() + } else { + cow + }; + + fs.write(&abs_path, &bytes).await + } }); cx.spawn(async move |this, cx| { @@ -2108,8 +2185,8 @@ impl Snapshot { if path.file_name().is_some() { let mut abs_path = self.abs_path.to_string(); for component in path.components() { - if !abs_path.ends_with(self.path_style.separator()) { - abs_path.push_str(self.path_style.separator()); + if !abs_path.ends_with(self.path_style.primary_separator()) { + abs_path.push_str(self.path_style.primary_separator()); } abs_path.push_str(component); } @@ -2384,6 +2461,36 @@ impl Snapshot { }) } + /// Resolves a path to an executable using the following heuristics: + /// + /// 1. If the path starts with `~`, it is expanded to the user's home directory. + /// 2. If the path is relative and contains more than one component, + /// it is joined to the worktree root path. + /// 3. If the path is relative and exists in the worktree + /// (even if falls under an exclusion filter), + /// it is joined to the worktree root path. + /// 4. Otherwise the path is returned unmodified. + /// + /// Relative paths that do not exist in the worktree may + /// still be found using the `PATH` environment variable. 
+ pub fn resolve_executable_path(&self, path: PathBuf) -> PathBuf { + if let Some(path_str) = path.to_str() { + if let Some(remaining_path) = path_str.strip_prefix("~/") { + return home_dir().join(remaining_path); + } else if path_str == "~" { + return home_dir().to_path_buf(); + } + } + + if let Ok(rel_path) = RelPath::new(&path, self.path_style) + && (path.components().count() > 1 || self.entry_for_path(&rel_path).is_some()) + { + self.abs_path().join(path) + } else { + path + } + } + pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> { let entry = self.entries_by_id.get(&id, ())?; self.entry_for_path(&entry.path) @@ -2524,13 +2631,21 @@ impl LocalSnapshot { } else { IgnoreStack::none() }; + + if let Some((repo_exclude, _)) = repo_root + .as_ref() + .and_then(|abs_path| self.repo_exclude_by_work_dir_abs_path.get(abs_path)) + { + ignore_stack = ignore_stack.append(IgnoreKind::RepoExclude, repo_exclude.clone()); + } ignore_stack.repo_root = repo_root; for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { if ignore_stack.is_abs_path_ignored(parent_abs_path, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { - ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore); + ignore_stack = + ignore_stack.append(IgnoreKind::Gitignore(parent_abs_path.into()), ignore); } } @@ -2706,13 +2821,30 @@ impl BackgroundScannerState { } } - async fn insert_entry( + fn entry_id_for( &mut self, - mut entry: Entry, - fs: &dyn Fs, - watcher: &dyn Watcher, - ) -> Entry { - self.reuse_entry_id(&mut entry); + next_entry_id: &AtomicUsize, + path: &RelPath, + metadata: &fs::Metadata, + ) -> ProjectEntryId { + // If an entry with the same inode was removed from the worktree during this scan, + // then it *might* represent the same file or directory. But the OS might also have + // re-used the inode for a completely different file or directory. + // + // Conditionally reuse the old entry's id: + // * if the mtime is the same, the file was probably been renamed. + // * if the path is the same, the file may just have been updated + if let Some(removed_entry) = self.removed_entries.remove(&metadata.inode) { + if removed_entry.mtime == Some(metadata.mtime) || *removed_entry.path == *path { + return removed_entry.id; + } + } else if let Some(existing_entry) = self.snapshot.entry_for_path(path) { + return existing_entry.id; + } + ProjectEntryId::new(next_entry_id) + } + + async fn insert_entry(&mut self, entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry { let entry = self.snapshot.insert_entry(entry, fs); if entry.path.file_name() == Some(&DOT_GIT) { self.insert_git_repository(entry.path.clone(), fs, watcher) @@ -3359,13 +3491,13 @@ impl Entry { fn new( path: Arc, metadata: &fs::Metadata, - next_entry_id: &AtomicUsize, + id: ProjectEntryId, root_char_bag: CharBag, canonical_path: Option>, ) -> Self { let char_bag = char_bag_for_path(root_char_bag, &path); Self { - id: ProjectEntryId::new(next_entry_id), + id, kind: if metadata.is_dir { EntryKind::PendingDir } else { @@ -3570,6 +3702,7 @@ struct BackgroundScanner { watcher: Arc, settings: WorktreeSettings, share_private_files: bool, + scanning_enabled: bool, } #[derive(Copy, Clone, PartialEq)] @@ -3585,14 +3718,33 @@ impl BackgroundScanner { // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. 
let root_abs_path = self.state.lock().await.snapshot.abs_path.clone(); - let (ignores, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await; - self.state - .lock() - .await - .snapshot - .ignores_by_parent_abs_path - .extend(ignores); - let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo { + + let repo = if self.scanning_enabled { + let (ignores, exclude, repo) = + discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await; + self.state + .lock() + .await + .snapshot + .ignores_by_parent_abs_path + .extend(ignores); + if let Some(exclude) = exclude { + self.state + .lock() + .await + .snapshot + .repo_exclude_by_work_dir_abs_path + .insert(root_abs_path.as_path().into(), (exclude, false)); + } + + repo + } else { + None + }; + + let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo + && self.scanning_enabled + { maybe!(async { self.state .lock() @@ -3616,6 +3768,7 @@ impl BackgroundScanner { let mut global_gitignore_events = if let Some(global_gitignore_path) = &paths::global_gitignore_path() + && self.scanning_enabled { let is_file = self.fs.is_file(&global_gitignore_path).await; self.state.lock().await.snapshot.global_gitignore = if is_file { @@ -3652,11 +3805,13 @@ impl BackgroundScanner { .await; if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) { root_entry.is_ignored = true; + let mut root_entry = root_entry.clone(); + state.reuse_entry_id(&mut root_entry); state - .insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()) + .insert_entry(root_entry, self.fs.as_ref(), self.watcher.as_ref()) .await; } - if root_entry.is_dir() { + if root_entry.is_dir() && self.scanning_enabled { state .enqueue_scan_dir( root_abs_path.as_path().into(), @@ -3765,7 +3920,7 @@ impl BackgroundScanner { let root_canonical_path = match &root_canonical_path { Ok(path) => SanitizedPath::new(path), Err(err) => { - log::error!("failed to canonicalize root path {root_path:?}: {err}"); + log::error!("failed to canonicalize root path {root_path:?}: {err:#}"); return true; } }; @@ -3843,33 +3998,40 @@ impl BackgroundScanner { let mut relative_paths = Vec::with_capacity(abs_paths.len()); let mut dot_git_abs_paths = Vec::new(); + let mut work_dirs_needing_exclude_update = Vec::new(); abs_paths.sort_unstable(); abs_paths.dedup_by(|a, b| a.starts_with(b)); { let snapshot = &self.state.lock().await.snapshot; - abs_paths.retain(|abs_path| { - let abs_path = &SanitizedPath::new(abs_path); + let mut ranges_to_drop = SmallVec::<[Range; 4]>::new(); - { - let mut is_git_related = false; + fn skip_ix(ranges: &mut SmallVec<[Range; 4]>, ix: usize) { + if let Some(last_range) = ranges.last_mut() + && last_range.end == ix + { + last_range.end += 1; + } else { + ranges.push(ix..ix + 1); + } + } + + for (ix, abs_path) in abs_paths.iter().enumerate() { + let abs_path = &SanitizedPath::new(&abs_path); - let dot_git_paths = self.executor.block(maybe!(async { - let mut path = None; - for ancestor in abs_path.as_path().ancestors() { + let mut is_git_related = false; + let mut dot_git_paths = None; + for ancestor in abs_path.as_path().ancestors() { if is_git_dir(ancestor, self.fs.as_ref()).await { let path_in_git_dir = abs_path .as_path() .strip_prefix(ancestor) .expect("stripping off the ancestor"); - path = Some((ancestor.to_owned(), path_in_git_dir.to_owned())); - break; - } + dot_git_paths = Some((ancestor.to_owned(), path_in_git_dir.to_owned())); + break; } - path - - })); + } if let Some((dot_git_abs_path, 
path_in_git_dir)) = dot_git_paths { if skipped_files_in_dot_git @@ -3879,8 +4041,11 @@ impl BackgroundScanner { path_in_git_dir.starts_with(skipped_git_subdir) }) { - log::debug!("ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories"); - return false; + log::debug!( + "ignoring event {abs_path:?} as it's in the .git directory among skipped files or directories" + ); + skip_ix(&mut ranges_to_drop, ix); + continue; } is_git_related = true; @@ -3889,8 +4054,7 @@ impl BackgroundScanner { } } - let relative_path = if let Ok(path) = - abs_path.strip_prefix(&root_canonical_path) + let relative_path = if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) && let Ok(path) = RelPath::new(path, PathStyle::local()) { path @@ -3901,12 +4065,25 @@ impl BackgroundScanner { ); } else { log::error!( - "ignoring event {abs_path:?} outside of root path {root_canonical_path:?}", + "ignoring event {abs_path:?} outside of root path {root_canonical_path:?}", ); } - return false; + skip_ix(&mut ranges_to_drop, ix); + continue; }; + let absolute_path = abs_path.to_path_buf(); + if absolute_path.ends_with(Path::new(DOT_GIT).join(REPO_EXCLUDE)) { + if let Some(repository) = snapshot + .git_repositories + .values() + .find(|repo| repo.common_dir_abs_path.join(REPO_EXCLUDE) == absolute_path) + { + work_dirs_needing_exclude_update + .push(repository.work_directory_abs_path.clone()); + } + } + if abs_path.file_name() == Some(OsStr::new(GITIGNORE)) { for (_, repo) in snapshot .git_repositories @@ -3928,25 +4105,43 @@ impl BackgroundScanner { }); if !parent_dir_is_loaded { log::debug!("ignoring event {relative_path:?} within unloaded directory"); - return false; + skip_ix(&mut ranges_to_drop, ix); + continue; } if self.settings.is_path_excluded(&relative_path) { if !is_git_related { log::debug!("ignoring FS event for excluded path {relative_path:?}"); } - return false; + skip_ix(&mut ranges_to_drop, ix); + continue; } relative_paths.push(relative_path.into_arc()); - true } - }); + + for range_to_drop in ranges_to_drop.into_iter().rev() { + abs_paths.drain(range_to_drop); + } } + if relative_paths.is_empty() && dot_git_abs_paths.is_empty() { return; } + if !work_dirs_needing_exclude_update.is_empty() { + let mut state = self.state.lock().await; + for work_dir_abs_path in work_dirs_needing_exclude_update { + if let Some((_, needs_update)) = state + .snapshot + .repo_exclude_by_work_dir_abs_path + .get_mut(&work_dir_abs_path) + { + *needs_update = true; + } + } + } + self.state.lock().await.snapshot.scan_id += 1; let (scan_job_tx, scan_job_rx) = channel::unbounded(); @@ -4060,7 +4255,7 @@ impl BackgroundScanner { let progress_update_count = AtomicUsize::new(0); self.executor - .scoped(|scope| { + .scoped_priority(Priority::Low, |scope| { for _ in 0..self.executor.num_cpus() { scope.spawn(async { let mut last_progress_update_count = 0; @@ -4214,7 +4409,8 @@ impl BackgroundScanner { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { Ok(ignore) => { let ignore = Arc::new(ignore); - ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone()); + ignore_stack = ignore_stack + .append(IgnoreKind::Gitignore(job.abs_path.clone()), ignore.clone()); new_ignore = Some(ignore); } Err(error) => { @@ -4245,7 +4441,7 @@ impl BackgroundScanner { let mut child_entry = Entry::new( child_path.clone(), &child_metadata, - &next_entry_id, + ProjectEntryId::new(&next_entry_id), root_char_bag, None, ); @@ -4432,10 +4628,11 @@ impl BackgroundScanner { 
.ignore_stack_for_abs_path(&abs_path, metadata.is_dir, self.fs.as_ref()) .await; let is_external = !canonical_path.starts_with(&root_canonical_path); + let entry_id = state.entry_id_for(self.next_entry_id.as_ref(), path, &metadata); let mut fs_entry = Entry::new( path.clone(), &metadata, - self.next_entry_id.as_ref(), + entry_id, state.snapshot.root_char_bag, if metadata.is_symlink { Some(canonical_path.as_path().to_path_buf().into()) @@ -4475,11 +4672,24 @@ impl BackgroundScanner { .await; if path.is_empty() - && let Some((ignores, repo)) = new_ancestor_repo.take() + && let Some((ignores, exclude, repo)) = new_ancestor_repo.take() { log::trace!("updating ancestor git repository"); state.snapshot.ignores_by_parent_abs_path.extend(ignores); if let Some((ancestor_dot_git, work_directory)) = repo { + if let Some(exclude) = exclude { + let work_directory_abs_path = self + .state + .lock() + .await + .snapshot + .work_directory_abs_path(&work_directory); + + state + .snapshot + .repo_exclude_by_work_dir_abs_path + .insert(work_directory_abs_path.into(), (exclude, false)); + } state .insert_git_repository_for_path( work_directory, @@ -4577,6 +4787,36 @@ impl BackgroundScanner { { let snapshot = &mut self.state.lock().await.snapshot; let abs_path = snapshot.abs_path.clone(); + + snapshot.repo_exclude_by_work_dir_abs_path.retain( + |work_dir_abs_path, (exclude, needs_update)| { + if *needs_update { + *needs_update = false; + ignores_to_update.push(work_dir_abs_path.clone()); + + if let Some((_, repository)) = snapshot + .git_repositories + .iter() + .find(|(_, repo)| &repo.work_directory_abs_path == work_dir_abs_path) + { + let exclude_abs_path = + repository.common_dir_abs_path.join(REPO_EXCLUDE); + if let Ok(current_exclude) = self + .executor + .block(build_gitignore(&exclude_abs_path, self.fs.as_ref())) + { + *exclude = Arc::new(current_exclude); + } + } + } + + snapshot + .git_repositories + .iter() + .any(|(_, repo)| &repo.work_directory_abs_path == work_dir_abs_path) + }, + ); + snapshot .ignores_by_parent_abs_path .retain(|parent_abs_path, (_, needs_update)| { @@ -4631,7 +4871,8 @@ impl BackgroundScanner { let mut ignore_stack = job.ignore_stack; if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) { - ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone()); + ignore_stack = + ignore_stack.append(IgnoreKind::Gitignore(job.abs_path.clone()), ignore.clone()); } let mut entries_by_id_edits = Vec::new(); @@ -4806,6 +5047,9 @@ impl BackgroundScanner { let preserve = ids_to_preserve.contains(work_directory_id); if !preserve { affected_repo_roots.push(entry.dot_git_abs_path.parent().unwrap().into()); + snapshot + .repo_exclude_by_work_dir_abs_path + .remove(&entry.work_directory_abs_path); } preserve }); @@ -4845,8 +5089,10 @@ async fn discover_ancestor_git_repo( root_abs_path: &SanitizedPath, ) -> ( HashMap, (Arc, bool)>, + Option>, Option<(PathBuf, WorkDirectory)>, ) { + let mut exclude = None; let mut ignores = HashMap::default(); for (index, ancestor) in root_abs_path.as_path().ancestors().enumerate() { if index != 0 { @@ -4882,6 +5128,7 @@ async fn discover_ancestor_git_repo( // also mark where in the git repo the root folder is located. 
return ( ignores, + exclude, Some(( ancestor_dot_git, WorkDirectory::AboveProject { @@ -4893,12 +5140,17 @@ async fn discover_ancestor_git_repo( }; } + let repo_exclude_abs_path = ancestor_dot_git.join(REPO_EXCLUDE); + if let Ok(repo_exclude) = build_gitignore(&repo_exclude_abs_path, fs.as_ref()).await { + exclude = Some(Arc::new(repo_exclude)); + } + // Reached root of git repository. break; } } - (ignores, None) + (ignores, exclude, None) } fn build_diff( @@ -5479,7 +5731,7 @@ impl TryFrom<(&CharBag, &PathMatcher, proto::Entry)> for Entry { let path = RelPath::from_proto(&entry.path).context("invalid relative path in proto message")?; let char_bag = char_bag_for_path(*root_char_bag, &path); - let is_always_included = always_included.is_match(path.as_std_path()); + let is_always_included = always_included.is_match(&path); Ok(Entry { id: ProjectEntryId::from_proto(entry.id), kind, @@ -5577,3 +5829,52 @@ async fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc

Result<()> { + Ok(()) + } + + fn remove(&self, _path: &Path) -> Result<()> { + Ok(()) + } +} + +fn decode_byte(bytes: Vec) -> (String, &'static Encoding, bool) { + // check BOM + if let Some((encoding, _bom_len)) = Encoding::for_bom(&bytes) { + let (cow, _) = encoding.decode_with_bom_removal(&bytes); + return (cow.into_owned(), encoding, true); + } + + fn detect_encoding(bytes: Vec) -> (String, &'static Encoding) { + let mut detector = EncodingDetector::new(); + detector.feed(&bytes, true); + + let encoding = detector.guess(None, true); // Use None for TLD hint to ensure neutral detection logic. + + let (cow, _, _) = encoding.decode(&bytes); + (cow.into_owned(), encoding) + } + + match String::from_utf8(bytes) { + Ok(text) => { + // ISO-2022-JP (and other ISO-2022 variants) consists entirely of 7-bit ASCII bytes, + // so it is valid UTF-8. However, it contains escape sequences starting with '\x1b'. + // If we find an escape character, we double-check the encoding to prevent + // displaying raw escape sequences instead of the correct characters. + if text.contains('\x1b') { + let (s, enc) = detect_encoding(text.into_bytes()); + (s, enc, false) + } else { + (text, encoding_rs::UTF_8, false) + } + } + Err(e) => { + let (s, enc) = detect_encoding(e.into_bytes()); + (s, enc, false) + } + } +} diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 94e83a16decd6b5d68498944e26ddcabecd27eed..a86720184ebf6d33755decf415ad97bdcfd7fd8c 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -25,25 +25,25 @@ pub struct WorktreeSettings { impl WorktreeSettings { pub fn is_path_private(&self, path: &RelPath) -> bool { path.ancestors() - .any(|ancestor| self.private_files.is_match(ancestor.as_std_path())) + .any(|ancestor| self.private_files.is_match(ancestor)) } pub fn is_path_excluded(&self, path: &RelPath) -> bool { path.ancestors() - .any(|ancestor| self.file_scan_exclusions.is_match(ancestor.as_std_path())) + .any(|ancestor| self.file_scan_exclusions.is_match(ancestor)) } pub fn is_path_always_included(&self, path: &RelPath, is_dir: bool) -> bool { if is_dir { - self.parent_dir_scan_inclusions.is_match(path.as_std_path()) + self.parent_dir_scan_inclusions.is_match(path) } else { - self.file_scan_inclusions.is_match(path.as_std_path()) + self.file_scan_inclusions.is_match(path) } } pub fn is_path_hidden(&self, path: &RelPath) -> bool { path.ancestors() - .any(|ancestor| self.hidden_files.is_match(ancestor.as_std_path())) + .any(|ancestor| self.hidden_files.is_match(ancestor)) } } @@ -66,7 +66,7 @@ impl Settings for WorktreeSettings { .collect(); Self { - project_name: worktree.project_name.into_inner(), + project_name: worktree.project_name, prevent_sharing_in_public_channels: worktree.prevent_sharing_in_public_channels, file_scan_exclusions: path_matchers(file_scan_exclusions, "file_scan_exclusions") .log_err() diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index e8d98b3508bd14f7ea8baaf1b985b42293eb078d..094a6d52ea4168752578eab06cea511a57e65c10 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1,7 +1,8 @@ use crate::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle}; -use anyhow::Result; +use anyhow::{Context as _, Result}; +use encoding_rs; use fs::{FakeFs, Fs, RealFs, RemoveOptions}; -use git::GITIGNORE; +use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE}; use gpui::{AppContext as _, BackgroundExecutor, 
BorrowAppContext, Context, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; @@ -19,6 +20,7 @@ use std::{ }; use util::{ ResultExt, path, + paths::PathStyle, rel_path::{RelPath, rel_path}, test::TempTree, }; @@ -44,6 +46,7 @@ async fn test_traversal(cx: &mut TestAppContext) { true, fs, Default::default(), + true, &mut cx.to_async(), ) .await @@ -108,6 +111,7 @@ async fn test_circular_symlinks(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -207,6 +211,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -357,6 +362,7 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -379,6 +385,7 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) { fs::RenameOptions { overwrite: true, ignore_if_exists: true, + create_parents: false, }, ) .await @@ -433,6 +440,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -597,6 +605,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -697,6 +706,7 @@ async fn test_write_file(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -715,6 +725,8 @@ async fn test_write_file(cx: &mut TestAppContext) { rel_path("tracked-dir/file.txt").into(), "hello".into(), Default::default(), + encoding_rs::UTF_8, + false, cx, ) }) @@ -726,6 +738,8 @@ async fn test_write_file(cx: &mut TestAppContext) { rel_path("ignored-dir/file.txt").into(), "world".into(), Default::default(), + encoding_rs::UTF_8, + false, cx, ) }) @@ -790,6 +804,7 @@ async fn test_file_scan_inclusions(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -855,6 +870,7 @@ async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -913,6 +929,7 @@ async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppC true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -998,6 +1015,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1079,6 +1097,7 @@ async fn test_hidden_files(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1189,6 +1208,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1300,6 +1320,7 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) { true, Arc::new(RealFs::new(None, cx.executor())), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1338,6 +1359,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { true, fs, Default::default(), + true, &mut cx.to_async(), ) .await @@ -1406,6 +1428,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { true, fs_fake, 
Default::default(), + true, &mut cx.to_async(), ) .await @@ -1447,6 +1470,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { true, fs_real, Default::default(), + true, &mut cx.to_async(), ) .await @@ -1532,6 +1556,177 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) { + // Tests the behavior of our worktree refresh when a file in a gitignored directory + // is created. + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "ignored_dir\n", + "ignored_dir": { + "existing_file.txt": "existing content", + "another_file.txt": "another content", + }, + }), + ) + .await; + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + true, + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap(); + assert!(ignored_dir.is_ignored); + assert_eq!(ignored_dir.kind, EntryKind::UnloadedDir); + }); + + tree.update(cx, |tree, cx| { + tree.load_file(rel_path("ignored_dir/existing_file.txt"), cx) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, _| { + let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap(); + assert!(ignored_dir.is_ignored); + assert_eq!(ignored_dir.kind, EntryKind::Dir); + + assert!( + tree.entry_for_path(rel_path("ignored_dir/existing_file.txt")) + .is_some() + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/another_file.txt")) + .is_some() + ); + }); + + let entry = tree + .update(cx, |tree, cx| { + tree.create_entry(rel_path("ignored_dir/new_file.txt").into(), false, None, cx) + }) + .await + .unwrap(); + assert!(entry.into_included().is_some()); + + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _| { + let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap(); + assert!(ignored_dir.is_ignored); + assert_eq!( + ignored_dir.kind, + EntryKind::Dir, + "ignored_dir should still be loaded, not UnloadedDir" + ); + + assert!( + tree.entry_for_path(rel_path("ignored_dir/existing_file.txt")) + .is_some(), + "existing_file.txt should still be visible" + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/another_file.txt")) + .is_some(), + "another_file.txt should still be visible" + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/new_file.txt")) + .is_some(), + "new_file.txt should be visible" + ); + }); +} + +#[gpui::test] +async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAppContext) { + // Tests the behavior of our worktree refresh when a directory modification for a gitignored directory + // is triggered. 
+ init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "ignored_dir\n", + "ignored_dir": { + "file1.txt": "content1", + "file2.txt": "content2", + }, + }), + ) + .await; + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + true, + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + // Load a file to expand the ignored directory + tree.update(cx, |tree, cx| { + tree.load_file(rel_path("ignored_dir/file1.txt"), cx) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, _| { + let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap(); + assert_eq!(ignored_dir.kind, EntryKind::Dir); + assert!( + tree.entry_for_path(rel_path("ignored_dir/file1.txt")) + .is_some() + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/file2.txt")) + .is_some() + ); + }); + + fs.emit_fs_event("/root/ignored_dir", Some(fs::PathEventKind::Changed)); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + let ignored_dir = tree.entry_for_path(rel_path("ignored_dir")).unwrap(); + assert_eq!( + ignored_dir.kind, + EntryKind::Dir, + "ignored_dir should still be loaded (Dir), not UnloadedDir" + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/file1.txt")) + .is_some(), + "file1.txt should still be visible after directory fs event" + ); + assert!( + tree.entry_for_path(rel_path("ignored_dir/file2.txt")) + .is_some(), + "file2.txt should still be visible after directory fs event" + ); + }); +} + #[gpui::test(iterations = 100)] async fn test_random_worktree_operations_during_initial_scan( cx: &mut TestAppContext, @@ -1558,6 +1753,7 @@ async fn test_random_worktree_operations_during_initial_scan( true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1648,6 +1844,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1720,6 +1917,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -1843,8 +2041,14 @@ fn randomly_mutate_worktree( }) } else { log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0); - let task = - worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx); + let task = worktree.write_file( + entry.path.clone(), + "".into(), + Default::default(), + encoding_rs::UTF_8, + false, + cx, + ); cx.background_spawn(async move { task.await?; Ok(()) @@ -1986,6 +2190,7 @@ async fn randomly_mutate_fs( fs::RenameOptions { overwrite: true, ignore_if_exists: true, + create_parents: false, }, ) .await @@ -2032,6 +2237,7 @@ async fn test_private_single_file_worktree(cx: &mut TestAppContext) { true, fs.clone(), Default::default(), + true, &mut cx.to_async(), ) .await @@ -2064,6 +2270,7 @@ async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestA true, fs.clone(), Arc::default(), + true, &mut cx.to_async(), ) .await @@ -2141,6 +2348,7 @@ async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppCon true, fs.clone(), Arc::default(), + true, &mut cx.to_async(), ) .await @@ -2216,6 +2424,94 @@ async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppCon }); } +#[gpui::test] +async fn test_repo_exclude(executor: BackgroundExecutor, cx: &mut TestAppContext) { + 
init_test(cx); + + let fs = FakeFs::new(executor); + let project_dir = Path::new(path!("/project")); + fs.insert_tree( + project_dir, + json!({ + ".git": { + "info": { + "exclude": ".env.*" + } + }, + ".env.example": "secret=xxxx", + ".env.local": "secret=1234", + ".gitignore": "!.env.example", + "README.md": "# Repo Exclude", + "src": { + "main.rs": "fn main() {}", + }, + }), + ) + .await; + + let worktree = Worktree::local( + project_dir, + true, + fs.clone(), + Default::default(), + true, + &mut cx.to_async(), + ) + .await + .unwrap(); + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .await; + cx.run_until_parked(); + + // .gitignore overrides .git/info/exclude + worktree.update(cx, |worktree, _cx| { + let expected_excluded_paths = []; + let expected_ignored_paths = [".env.local"]; + let expected_tracked_paths = [".env.example", "README.md", "src/main.rs"]; + let expected_included_paths = []; + + check_worktree_entries( + worktree, + &expected_excluded_paths, + &expected_ignored_paths, + &expected_tracked_paths, + &expected_included_paths, + ); + }); + + // Ignore statuses are updated when .git/info/exclude file changes + fs.write( + &project_dir.join(DOT_GIT).join(REPO_EXCLUDE), + ".env.example".as_bytes(), + ) + .await + .unwrap(); + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .await; + cx.run_until_parked(); + + worktree.update(cx, |worktree, _cx| { + let expected_excluded_paths = []; + let expected_ignored_paths = []; + let expected_tracked_paths = [".env.example", ".env.local", "README.md", "src/main.rs"]; + let expected_included_paths = []; + + check_worktree_entries( + worktree, + &expected_excluded_paths, + &expected_ignored_paths, + &expected_tracked_paths, + &expected_included_paths, + ); + }); +} + #[track_caller] fn check_worktree_entries( tree: &Worktree, @@ -2268,3 +2564,176 @@ fn init_test(cx: &mut gpui::TestAppContext) { cx.set_global(settings_store); }); } + +#[gpui::test] +async fn test_load_file_encoding(cx: &mut TestAppContext) { + init_test(cx); + let test_cases: Vec<(&str, &[u8], &str)> = vec![ + ("utf8.txt", "こんにちは".as_bytes(), "こんにちは"), // "こんにちは" is Japanese "Hello" + ( + "sjis.txt", + &[0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd], + "こんにちは", + ), + ( + "eucjp.txt", + &[0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf], + "こんにちは", + ), + ( + "iso2022jp.txt", + &[ + 0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b, + 0x28, 0x42, + ], + "こんにちは", + ), + // Western Europe (Windows-1252) + // "Café" -> 0xE9 is 'é' in Windows-1252 (it is typically 0xC3 0xA9 in UTF-8) + ("win1252.txt", &[0x43, 0x61, 0x66, 0xe9], "Café"), + // Chinese Simplified (GBK) + // Note: We use a slightly longer string here because short byte sequences can be ambiguous + // in multi-byte encodings. Providing more context helps the heuristic detector guess correctly. 
+ // Text: "今天天气不错" (Today's weather is not bad / nice) + // Bytes: + // 今: BD F1 + // 天: CC EC + // 天: CC EC + // 气: C6 F8 + // 不: B2 BB + // 错: B4 ED + ( + "gbk.txt", + &[ + 0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed, + ], + "今天天气不错", + ), + ( + "utf16le_bom.txt", + &[ + 0xFF, 0xFE, // BOM + 0x53, 0x30, // こ + 0x93, 0x30, // ん + 0x6B, 0x30, // に + 0x61, 0x30, // ち + 0x6F, 0x30, // は + ], + "こんにちは", + ), + ( + "utf8_bom.txt", + &[ + 0xEF, 0xBB, 0xBF, // UTF-8 BOM + 0xE3, 0x81, 0x93, // こ + 0xE3, 0x82, 0x93, // ん + 0xE3, 0x81, 0xAB, // に + 0xE3, 0x81, 0xA1, // ち + 0xE3, 0x81, 0xAF, // は + ], + "こんにちは", + ), + ]; + + let root_path = if cfg!(windows) { + Path::new("C:\\root") + } else { + Path::new("/root") + }; + + let fs = FakeFs::new(cx.background_executor.clone()); + + let mut files_json = serde_json::Map::new(); + for (name, _, _) in &test_cases { + files_json.insert(name.to_string(), serde_json::Value::String("".to_string())); + } + + for (name, bytes, _) in &test_cases { + let path = root_path.join(name); + fs.write(&path, bytes).await.unwrap(); + } + + let tree = Worktree::local( + root_path, + true, + fs, + Default::default(), + true, + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + for (name, _, expected) in test_cases { + let loaded = tree + .update(cx, |tree, cx| tree.load_file(rel_path(name), cx)) + .await + .with_context(|| format!("Failed to load {}", name)) + .unwrap(); + + assert_eq!( + loaded.text, expected, + "Encoding mismatch for file: {}", + name + ); + } +} + +#[gpui::test] +async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let root_path = if cfg!(windows) { + Path::new("C:\\root") + } else { + Path::new("/root") + }; + fs.create_dir(root_path).await.unwrap(); + let file_path = root_path.join("test.txt"); + + fs.insert_file(&file_path, "initial".into()).await; + + let worktree = Worktree::local( + root_path, + true, + fs.clone(), + Default::default(), + true, + &mut cx.to_async(), + ) + .await + .unwrap(); + + let path: Arc = Path::new("test.txt").into(); + let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc(); + + let text = text::Rope::from("こんにちは"); + + let task = worktree.update(cx, |wt, cx| { + wt.write_file( + rel_path, + text, + text::LineEnding::Unix, + encoding_rs::SHIFT_JIS, + false, + cx, + ) + }); + + task.await.unwrap(); + + let bytes = fs.load_bytes(&file_path).await.unwrap(); + + let expected_bytes = vec![ + 0x82, 0xb1, // こ + 0x82, 0xf1, // ん + 0x82, 0xc9, // に + 0x82, 0xbf, // ち + 0x82, 0xcd, // は + ]; + + assert_eq!(bytes, expected_bytes, "Should be saved as Shift-JIS"); +} diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index aac231b511684db9e2bfc36c822a963e5f231161..072a893a6a8f4fc7fbc8a6f4f5ed43316915b974 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -30,6 +30,17 @@ pub enum Model { alias = "grok-4-fast-non-reasoning-latest" )] Grok4FastNonReasoning, + #[serde( + rename = "grok-4-1-fast-non-reasoning", + alias = "grok-4-1-fast-non-reasoning-latest" + )] + Grok41FastNonReasoning, + #[serde( + rename = "grok-4-1-fast-reasoning", + alias = "grok-4-1-fast-reasoning-latest", + alias = "grok-4-1-fast" + )] + Grok41FastReasoning, #[serde(rename = "grok-code-fast-1", alias = "grok-code-fast-1-0825")] GrokCodeFast1, #[serde(rename = "custom")] @@ -56,6 +67,9 @@ impl Model { "grok-4" => Ok(Self::Grok4), 
"grok-4-fast-reasoning" => Ok(Self::Grok4FastReasoning), "grok-4-fast-non-reasoning" => Ok(Self::Grok4FastNonReasoning), + "grok-4-1-fast-non-reasoning" => Ok(Self::Grok41FastNonReasoning), + "grok-4-1-fast-reasoning" => Ok(Self::Grok41FastReasoning), + "grok-4-1-fast" => Ok(Self::Grok41FastReasoning), "grok-2-vision" => Ok(Self::Grok2Vision), "grok-3" => Ok(Self::Grok3), "grok-3-mini" => Ok(Self::Grok3Mini), @@ -76,6 +90,8 @@ impl Model { Self::Grok4 => "grok-4", Self::Grok4FastReasoning => "grok-4-fast-reasoning", Self::Grok4FastNonReasoning => "grok-4-fast-non-reasoning", + Self::Grok41FastNonReasoning => "grok-4-1-fast-non-reasoning", + Self::Grok41FastReasoning => "grok-4-1-fast-reasoning", Self::GrokCodeFast1 => "grok-code-fast-1", Self::Custom { name, .. } => name, } @@ -91,6 +107,8 @@ impl Model { Self::Grok4 => "Grok 4", Self::Grok4FastReasoning => "Grok 4 Fast", Self::Grok4FastNonReasoning => "Grok 4 Fast (Non-Reasoning)", + Self::Grok41FastNonReasoning => "Grok 4.1 Fast (Non-Reasoning)", + Self::Grok41FastReasoning => "Grok 4.1 Fast", Self::GrokCodeFast1 => "Grok Code Fast 1", Self::Custom { name, display_name, .. @@ -102,7 +120,10 @@ impl Model { match self { Self::Grok3 | Self::Grok3Mini | Self::Grok3Fast | Self::Grok3MiniFast => 131_072, Self::Grok4 | Self::GrokCodeFast1 => 256_000, - Self::Grok4FastReasoning | Self::Grok4FastNonReasoning => 128_000, + Self::Grok4FastReasoning + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning => 2_000_000, Self::Grok2Vision => 8_192, Self::Custom { max_tokens, .. } => *max_tokens, } @@ -114,6 +135,8 @@ impl Model { Self::Grok4 | Self::Grok4FastReasoning | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning | Self::GrokCodeFast1 => Some(64_000), Self::Grok2Vision => Some(4_096), Self::Custom { @@ -131,7 +154,9 @@ impl Model { | Self::Grok3MiniFast | Self::Grok4 | Self::Grok4FastReasoning - | Self::Grok4FastNonReasoning => true, + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning => true, Self::Custom { parallel_tool_calls: Some(support), .. @@ -154,6 +179,8 @@ impl Model { | Self::Grok4 | Self::Grok4FastReasoning | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning | Self::GrokCodeFast1 => true, Self::Custom { supports_tools: Some(support), @@ -165,7 +192,12 @@ impl Model { pub fn supports_images(&self) -> bool { match self { - Self::Grok2Vision => true, + Self::Grok2Vision + | Self::Grok4 + | Self::Grok4FastReasoning + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning => true, Self::Custom { supports_images: Some(support), .. diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 8b641dd8ce1f769dbc517f06e0a4e5824a61380c..fd160759f4440e2736d57cea62abb6bdb138ae72 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.213.0" +version = "0.219.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -10,6 +10,9 @@ authors = ["Zed Team "] [lints] workspace = true +[features] +tracy = ["ztracing/tracy"] + [[bin]] name = "zed" path = "src/zed-main.rs" @@ -23,13 +26,13 @@ acp_tools.workspace = true activity_indicator.workspace = true agent_settings.workspace = true agent_ui.workspace = true +agent_ui_v2.workspace = true anyhow.workspace = true askpass.workspace = true assets.workspace = true audio.workspace = true auto_update.workspace = true auto_update_ui.workspace = true -backtrace = "0.3" bincode.workspace = true breadcrumbs.workspace = true call.workspace = true @@ -51,7 +54,6 @@ debugger_tools.workspace = true debugger_ui.workspace = true diagnostics.workspace = true editor.workspace = true -zeta2_tools.workspace = true env_logger.workspace = true extension.workspace = true extension_host.workspace = true @@ -75,7 +77,8 @@ gpui = { workspace = true, features = [ gpui_tokio.workspace = true rayon.workspace = true -edit_prediction_button.workspace = true +edit_prediction.workspace = true +edit_prediction_ui.workspace = true http_client.workspace = true image_viewer.workspace = true inspector_ui.workspace = true @@ -97,9 +100,9 @@ markdown.workspace = true markdown_preview.workspace = true menu.workspace = true migrator.workspace = true +miniprofiler_ui.workspace = true mimalloc = { version = "0.1", optional = true } nc.workspace = true -nix = { workspace = true, features = ["pthread", "signal"] } node_runtime.workspace = true notifications.workspace = true onboarding.workspace = true @@ -139,13 +142,14 @@ tab_switcher.workspace = true task.workspace = true tasks_ui.workspace = true telemetry.workspace = true -telemetry_events.workspace = true terminal_view.workspace = true theme.workspace = true theme_extension.workspace = true theme_selector.workspace = true time.workspace = true title_bar.workspace = true +ztracing.workspace = true +tracing.workspace = true toolchain_selector.workspace = true ui.workspace = true ui_input.workspace = true @@ -159,16 +163,17 @@ vim_mode_setting.workspace = true watch.workspace = true web_search.workspace = true web_search_providers.workspace = true +which_key.workspace = true workspace.workspace = true zed_actions.workspace = true zed_env_vars.workspace = true -zeta.workspace = true -zeta2.workspace = true zlog.workspace = true zlog_settings.workspace = true +chrono.workspace = true [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true +chrono.workspace = true [target.'cfg(target_os = "windows")'.build-dependencies] winresource = "0.1" @@ -186,10 +191,15 @@ itertools.workspace = true language = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } +semver.workspace = true terminal_view = { workspace = true, features = ["test-support"] } tree-sitter-md.workspace = true tree-sitter-rust.workspace = true workspace = { workspace = true, features = ["test-support"] } +agent_ui = { workspace = true, features = ["test-support"] } +agent_ui_v2 = { workspace = true, features = ["test-support"] } +search = { workspace = true, features = ["test-support"] } + [package.metadata.bundle-dev] icon = ["resources/app-icon-dev@2x.png", "resources/app-icon-dev.png"] @@ -224,4 +234,4 @@ osx_info_plist_exts = ["resources/info/*"] osx_url_schemes = ["zed"] [package.metadata.cargo-machete] -ignored = 
["profiling", "zstd"] +ignored = ["profiling", "zstd", "tracing"] diff --git a/crates/zed/build.rs b/crates/zed/build.rs index be420defa3aba17a739ffe18b24512078fce2b3a..dd26a1152e965414abc102c37ff570a7ba7184d4 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -32,12 +32,16 @@ fn main() { println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); + if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") { + println!("cargo:rustc-env=ZED_BUILD_ID={build_identifier}"); + } + if let Ok(build_profile) = std::env::var("PROFILE") && build_profile == "release" { // This is currently the best way to make `cargo build ...`'s build script // to print something to stdout without extra verbosity. - println!("cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"); + println!("cargo::warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"); } } @@ -49,6 +53,25 @@ fn main() { println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024); } + if cfg!(target_arch = "x86_64") { + println!("cargo::rerun-if-changed=resources\\windows\\bin\\x64\\conpty.dll"); + println!("cargo::rerun-if-changed=resources\\windows\\bin\\x64\\OpenConsole.exe"); + let conpty_target = std::env::var("OUT_DIR").unwrap() + "\\..\\..\\..\\conpty.dll"; + match std::fs::copy("resources/windows/bin/x64/conpty.dll", &conpty_target) { + Ok(_) => println!("Copied conpty.dll to {conpty_target}"), + Err(e) => println!("cargo::warning=Failed to copy conpty.dll: {}", e), + } + let open_console_target = + std::env::var("OUT_DIR").unwrap() + "\\..\\..\\..\\OpenConsole.exe"; + match std::fs::copy( + "resources/windows/bin/x64/OpenConsole.exe", + &open_console_target, + ) { + Ok(_) => println!("Copied OpenConsole.exe to {open_console_target}"), + Err(e) => println!("cargo::warning=Failed to copy OpenConsole.exe: {}", e), + } + } + let release_channel = option_env!("RELEASE_CHANNEL").unwrap_or("dev"); let icon = match release_channel { "stable" => "resources/windows/app-icon.ico", diff --git a/crates/zed/resources/Document.icns b/crates/zed/resources/Document.icns new file mode 100644 index 0000000000000000000000000000000000000000..5d0185c81a32c214f213f12243aeab01e32830e1 Binary files /dev/null and b/crates/zed/resources/Document.icns differ diff --git a/crates/zed/resources/windows/bin/x64/OpenConsole.exe b/crates/zed/resources/windows/bin/x64/OpenConsole.exe new file mode 100644 index 0000000000000000000000000000000000000000..8bb6ab2188fd7a56adc941c3f8449265e762cf06 Binary files /dev/null and b/crates/zed/resources/windows/bin/x64/OpenConsole.exe differ diff --git a/crates/zed/resources/windows/bin/x64/conpty.dll b/crates/zed/resources/windows/bin/x64/conpty.dll new file mode 100644 index 0000000000000000000000000000000000000000..555d6bf655a7cb0427b630f1052bb873c837a152 Binary files /dev/null and b/crates/zed/resources/windows/bin/x64/conpty.dll differ diff --git a/crates/zed/resources/zed.entitlements b/crates/zed/resources/zed.entitlements index cb4cd3dc692160047ae5012489a350829c4a1ccf..2a16afe7551f433e3f835a2097df61a2e9e86ee1 100644 --- a/crates/zed/resources/zed.entitlements +++ b/crates/zed/resources/zed.entitlements @@ -22,5 +22,9 @@ com.apple.security.personal-information.photos-library + com.apple.security.files.user-selected.read-write + + com.apple.security.files.downloads.read-write + diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 736ade7dba62faf66a1fca10ee102d75c5378e14..9ccbe7c742b494f7c4772d2c411ff284d1ffddf0 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ 
-3,7 +3,7 @@ mod zed; use agent_ui::AgentPanel; use anyhow::{Context as _, Error, Result}; -use clap::{Parser, command}; +use clap::Parser; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use client::{Client, ProxySettings, UserStore, parse_zed_link}; use collab_ui::channel_view::ChannelView; @@ -29,7 +29,7 @@ use reqwest_client::ReqwestClient; use assets::Assets; use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; -use project::project_settings::ProjectSettings; +use project::{project_settings::ProjectSettings, trusted_worktrees}; use recent_projects::{SshSettings, open_remote_project}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; @@ -39,7 +39,8 @@ use std::{ io::{self, IsTerminal}, path::{Path, PathBuf}, process, - sync::Arc, + sync::{Arc, OnceLock}, + time::Instant, }; use theme::{ActiveTheme, GlobalTheme, ThemeRegistry}; use util::{ResultExt, TryFutureExt, maybe}; @@ -51,8 +52,8 @@ use workspace::{ use zed::{ OpenListener, OpenRequest, RawOpenRequest, app_menus, build_window_options, derive_paths_with_position, edit_prediction_registry, handle_cli_connection, - handle_keymap_file_changes, handle_settings_changed, handle_settings_file_changes, - initialize_workspace, open_paths_with_positions, + handle_keymap_file_changes, handle_settings_file_changes, initialize_workspace, + open_paths_with_positions, }; use crate::zed::{OpenRequestKind, eager_load_active_theme_and_icon_theme}; @@ -131,6 +132,7 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) { process::exit(1); } + // Maybe unify this with gpui::platform::linux::platform::ResultExt::notify_err(..)? #[cfg(any(target_os = "linux", target_os = "freebsd"))] { use ashpd::desktop::notification::{Notification, NotificationProxy, Priority}; @@ -163,8 +165,11 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) { .detach(); } } +pub static STARTUP_TIME: OnceLock = OnceLock::new(); pub fn main() { + STARTUP_TIME.get_or_init(|| Instant::now()); + #[cfg(unix)] util::prevent_root_execution(); @@ -237,6 +242,7 @@ pub fn main() { } zlog::init(); + if stdout_is_a_pty() { zlog::init_output_stdout(); } else { @@ -246,10 +252,12 @@ pub fn main() { zlog::init_output_stdout(); }; } + ztracing::init(); - let app_version = AppVersion::load(env!("CARGO_PKG_VERSION")); + let version = option_env!("ZED_BUILD_ID"); let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|commit_sha| AppCommitSha::new(commit_sha.to_string())); + let app_version = AppVersion::load(env!("CARGO_PKG_VERSION"), version, app_commit_sha.clone()); if args.system_specs { let system_specs = system_specs::SystemSpecs::new_stateless( @@ -262,7 +270,7 @@ pub fn main() { } rayon::ThreadPoolBuilder::new() - .num_threads(4) + .num_threads(std::thread::available_parallelism().map_or(1, |n| n.get().div_ceil(2))) .stack_size(10 * 1024 * 1024) .thread_name(|ix| format!("RayonWorker{}", ix)) .build_global() @@ -283,14 +291,16 @@ pub fn main() { let app = Application::new().with_assets(Assets); - let system_id = app.background_executor().block(system_id()).ok(); - let installation_id = app.background_executor().block(installation_id()).ok(); + let system_id = app.background_executor().spawn(system_id()); + let installation_id = app.background_executor().spawn(installation_id()); let session_id = Uuid::new_v4().to_string(); - let session = app.background_executor().block(Session::new()); + let session = app + .background_executor() + .spawn(Session::new(session_id.clone())); app.background_executor() 
.spawn(crashes::init(InitCrashHandler { - session_id: session_id.clone(), + session_id, zed_version: app_version.to_string(), binary: "zed".to_string(), release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), @@ -338,7 +348,9 @@ pub fn main() { } else { None }; - log::info!("Using git binary path: {:?}", git_binary_path); + if let Some(git_binary_path) = &git_binary_path { + log::info!("Using git binary path: {:?}", git_binary_path); + } let fs = Arc::new(RealFs::new(git_binary_path, app.background_executor())); let user_settings_file_rx = watch_config_file( @@ -396,6 +408,14 @@ pub fn main() { }); app.run(move |cx| { + let trusted_paths = match workspace::WORKSPACE_DB.fetch_trusted_worktrees(None, None, cx) { + Ok(trusted_paths) => trusted_paths, + Err(e) => { + log::error!("Failed to do initial trusted worktrees fetch: {e:#}"); + HashMap::default() + } + }; + trusted_worktrees::init(trusted_paths, None, None, cx); menu::init(); zed_actions::init(); @@ -406,12 +426,7 @@ pub fn main() { } settings::init(cx); zlog_settings::init(cx); - handle_settings_file_changes( - user_settings_file_rx, - global_settings_file_rx, - cx, - handle_settings_changed, - ); + handle_settings_file_changes(user_settings_file_rx, global_settings_file_rx, cx); handle_keymap_file_changes(user_keymap_file_rx, cx); let user_agent = format!( @@ -469,6 +484,7 @@ pub fn main() { tx.send(Some(options)).log_err(); }) .detach(); + let node_runtime = NodeRuntime::new(client.http_client(), Some(shell_env_loaded_rx), rx); debug_adapter_extension::init(extension_host_proxy.clone(), cx); @@ -504,11 +520,16 @@ pub fn main() { debugger_ui::init(cx); debugger_tools::init(cx); client::init(&client, cx); + + let system_id = cx.background_executor().block(system_id).ok(); + let installation_id = cx.background_executor().block(installation_id).ok(); + let session = cx.background_executor().block(session); + let telemetry = client.telemetry(); telemetry.start( system_id.as_ref().map(|id| id.to_string()), installation_id.as_ref().map(|id| id.to_string()), - session_id.clone(), + session.id().to_owned(), cx, ); @@ -544,11 +565,7 @@ pub fn main() { auto_update::init(client.clone(), cx); dap_adapters::init(cx); auto_update_ui::init(cx); - reliability::init( - client.http_client(), - system_id.as_ref().map(|id| id.to_string()), - cx, - ); + reliability::init(client.clone(), cx); extension_host::init( extension_host_proxy.clone(), app_state.fs.clone(), @@ -577,7 +594,7 @@ pub fn main() { language_model::init(app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); - zeta2_tools::init(cx); + edit_prediction_ui::init(cx); web_search::init(cx); web_search_providers::init(app_state.client.clone(), cx); snippet_provider::init(cx); @@ -591,6 +608,7 @@ pub fn main() { false, cx, ); + agent_ui_v2::agents_panel::init(cx); repl::init(app_state.fs.clone(), cx); recent_projects::init(cx); @@ -636,9 +654,11 @@ pub fn main() { settings_ui::init(cx); keymap_editor::init(cx); extensions_ui::init(cx); - zeta::init(cx); + edit_prediction::init(cx); inspector_ui::init(app_state.clone(), cx); json_schema_store::init(cx); + miniprofiler_ui::init(*STARTUP_TIME.get().unwrap(), cx); + which_key::init(cx); cx.observe_global::({ let http = app_state.client.http_client(); @@ -794,7 +814,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut workspace::get_any_active_workspace(app_state, cx.clone()).await?; workspace.update(cx, |workspace, window, cx| { if let 
Some(panel) = workspace.panel::(cx) { - panel.focus_handle(cx).focus(window); + panel.focus_handle(cx).focus(window, cx); } }) }) @@ -886,6 +906,44 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }), ); } + OpenRequestKind::GitCommit { sha } => { + cx.spawn(async move |cx| { + let paths_with_position = + derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await; + let (workspace, _results) = open_paths_with_positions( + &paths_with_position, + &[], + app_state, + workspace::OpenOptions::default(), + cx, + ) + .await?; + + workspace + .update(cx, |workspace, window, cx| { + let Some(repo) = workspace.project().read(cx).active_repository(cx) + else { + log::error!("no active repository found for commit view"); + return Err(anyhow::anyhow!("no active repository found")); + }; + + git_ui::commit_view::CommitView::open( + sha, + repo.downgrade(), + workspace.weak_handle(), + None, + None, + window, + cx, + ); + Ok(()) + }) + .log_err(); + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } } return; @@ -1160,7 +1218,13 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp app_state, cx, |workspace, window, cx| { - Editor::new_file(workspace, &Default::default(), window, cx) + let restore_on_startup = WorkspaceSettings::get_global(cx).restore_on_startup; + match restore_on_startup { + workspace::RestoreOnStartupBehavior::Launchpad => {} + _ => { + Editor::new_file(workspace, &Default::default(), window, cx); + } + } }, ) })? @@ -1239,6 +1303,7 @@ fn init_paths() -> HashMap> { paths::database_dir(), paths::logs_dir(), paths::temp_dir(), + paths::hang_traces_dir(), ] .into_iter() .fold(HashMap::default(), |mut errors, path| { @@ -1254,7 +1319,7 @@ pub fn stdout_is_a_pty() -> bool { } #[derive(Parser, Debug)] -#[command(name = "zed", disable_version_flag = true)] +#[command(name = "zed", disable_version_flag = true, max_term_width = 100)] struct Args { /// A sequence of space-separated paths or urls that you want to open. /// @@ -1269,11 +1334,12 @@ struct Args { diff: Vec, /// Sets a custom directory for all user data (e.g., database, extensions, logs). + /// /// This overrides the default platform-specific data directory location. /// On macOS, the default is `~/Library/Application Support/Zed`. /// On Linux/FreeBSD, the default is `$XDG_DATA_HOME/zed`. /// On Windows, the default is `%LOCALAPPDATA%\Zed`. - #[arg(long, value_name = "DIR")] + #[arg(long, value_name = "DIR", verbatim_doc_comment)] user_data_dir: Option, /// The username and WSL distribution to use when opening paths. If not specified, @@ -1293,8 +1359,11 @@ struct Args { #[arg(long)] dev_server_token: Option, - /// Prints system specs. Useful for submitting issues on GitHub when encountering a bug - /// that prevents Zed from starting, so you can't run `zed: copy system specs to clipboard` + /// Prints system specs. 
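The `Args` changes in this hunk lean on two clap derive attributes: `max_term_width = 100` caps help-text wrapping, and `verbatim_doc_comment` keeps the doc comment's own line breaks for `--user-data-dir` instead of letting clap re-flow them. A toy, standalone sketch of those attributes follows; the struct is illustrative, not Zed's real argument set.

```rust
// Toy CLI showing the clap derive attributes used in the hunk. Assumes the
// clap 4 derive feature is enabled; the single argument is a stand-in.
use std::path::PathBuf;

use clap::Parser;

#[derive(Parser, Debug)]
#[command(name = "zed", disable_version_flag = true, max_term_width = 100)]
struct Args {
    /// Sets a custom directory for all user data (e.g., database, extensions, logs).
    ///
    /// This overrides the default platform-specific data directory location.
    /// On macOS, the default is `~/Library/Application Support/Zed`.
    // `verbatim_doc_comment` preserves the blank line and per-line breaks above
    // when clap renders the long help text.
    #[arg(long, value_name = "DIR", verbatim_doc_comment)]
    user_data_dir: Option<PathBuf>,
}

fn main() {
    let args = Args::parse();
    println!("user data dir override: {:?}", args.user_data_dir);
}
```

Running it with `--help` shows the doc comment verbatim, wrapped to at most 100 columns.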
+ /// + /// Useful for submitting issues on GitHub when encountering a bug that + /// prevents Zed from starting, so you can't run `zed: copy system specs to + /// clipboard` #[arg(long)] system_specs: bool, diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index dcabe93aab4ff35de44b77b87eb8495f537564fe..da8dffa85d57162a62dd6ae0a698d975d22ee374 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -1,41 +1,42 @@ use anyhow::{Context as _, Result}; -use client::{TelemetrySettings, telemetry::MINIDUMP_ENDPOINT}; -use futures::AsyncReadExt; -use gpui::{App, AppContext as _}; -use http_client::{self, HttpClient, HttpClientWithUrl}; +use client::{Client, telemetry::MINIDUMP_ENDPOINT}; +use futures::{AsyncReadExt, TryStreamExt}; +use gpui::{App, AppContext as _, SerializedThreadTaskTimings}; +use http_client::{self, AsyncBody, HttpClient, Request}; +use log::info; use project::Project; use proto::{CrashReport, GetCrashFilesResponse}; use reqwest::multipart::{Form, Part}; -use settings::Settings; use smol::stream::StreamExt; -use std::{ffi::OsStr, fs, sync::Arc}; +use std::{ffi::OsStr, fs, sync::Arc, thread::ThreadId, time::Duration}; use util::ResultExt; -pub fn init(http_client: Arc, installation_id: Option, cx: &mut App) { - #[cfg(target_os = "macos")] - monitor_main_thread_hangs(http_client.clone(), installation_id.clone(), cx); +use crate::STARTUP_TIME; - if client::TelemetrySettings::get_global(cx).diagnostics { - let client = http_client.clone(); - let id = installation_id.clone(); +pub fn init(client: Arc, cx: &mut App) { + monitor_hangs(cx); + + if client.telemetry().diagnostics_enabled() { + let client = client.clone(); cx.background_spawn(async move { - upload_previous_minidumps(client, id).await.warn_on_err(); + upload_previous_minidumps(client).await.warn_on_err(); }) .detach() } cx.observe_new(move |project: &mut Project, _, cx| { - let http_client = http_client.clone(); - let installation_id = installation_id.clone(); + let client = client.clone(); let Some(remote_client) = project.remote_client() else { return; }; - remote_client.update(cx, |client, cx| { - if !TelemetrySettings::get_global(cx).diagnostics { + remote_client.update(cx, |remote_client, cx| { + if !client.telemetry().diagnostics_enabled() { return; } - let request = client.proto_client().request(proto::GetCrashFiles {}); + let request = remote_client + .proto_client() + .request(proto::GetCrashFiles {}); cx.background_spawn(async move { let GetCrashFilesResponse { crashes } = request.await?; @@ -48,15 +49,9 @@ pub fn init(http_client: Arc, installation_id: Option } in crashes { if let Some(metadata) = serde_json::from_str(&metadata).log_err() { - upload_minidump( - http_client.clone(), - endpoint, - minidump_contents, - &metadata, - installation_id.clone(), - ) - .await - .log_err(); + upload_minidump(client.clone(), endpoint, minidump_contents, &metadata) + .await + .log_err(); } } @@ -68,91 +63,13 @@ pub fn init(http_client: Arc, installation_id: Option .detach(); } -#[cfg(target_os = "macos")] -pub fn monitor_main_thread_hangs( - http_client: Arc, - installation_id: Option, - cx: &App, -) { - // This is too noisy to ship to stable for now. 
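The reliability hunk here swaps the old macOS-only SIGUSR2 backtrace monitor for a portable watchdog: a background loop pings a bounded channel once per second, the foreground executor drains it, and a channel that stays full is treated as a suspected hang, at which point per-thread task timings are serialized into a `.miniprof` trace under the hang-traces directory. Below is a minimal standalone sketch of that watchdog idea, using `std::thread` and `std::sync::mpsc` as stand-ins for the GPUI executors (an assumption made purely for illustration).

```rust
// Standalone watchdog sketch. A bounded channel models the foreground task
// queue: while the "foreground" thread keeps draining pings, try_send succeeds;
// once it stalls, the buffer fills and try_send reports Full, which we treat
// as a suspected hang (the real code snapshots executor timings at that point).
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

fn main() {
    let (tx, rx) = mpsc::sync_channel::<()>(3);

    // Stand-in for the foreground loop: drain a couple of pings, then stall.
    thread::spawn(move || {
        for _ in 0..2 {
            let _ = rx.recv();
        }
        thread::sleep(Duration::from_secs(10)); // simulated hang
    });

    let mut hanging = false;
    for _ in 0..8 {
        thread::sleep(Duration::from_secs(1));
        match tx.try_send(()) {
            Ok(()) => hanging = false,
            Err(mpsc::TrySendError::Full(())) => {
                if !hanging {
                    hanging = true;
                    eprintln!("suspected hang: foreground stopped draining pings");
                    // Real code: dispatcher.get_all_timings() -> serialize ->
                    // write hang-<timestamp>.miniprof into the hang traces dir.
                }
            }
            Err(mpsc::TrySendError::Disconnected(())) => break,
        }
    }
}
```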
- if !matches!( - ReleaseChannel::global(cx), - ReleaseChannel::Dev | ReleaseChannel::Nightly | ReleaseChannel::Preview - ) { - return; - } - - use nix::sys::signal::{ - SaFlags, SigAction, SigHandler, SigSet, - Signal::{self, SIGUSR2}, - sigaction, - }; - - use parking_lot::Mutex; - - use http_client::Method; - use release_channel::ReleaseChannel; - use std::{ - ffi::c_int, - sync::{OnceLock, mpsc}, - time::Duration, - }; - use telemetry_events::{BacktraceFrame, HangReport}; - - use nix::sys::pthread; +fn monitor_hangs(cx: &App) { + let main_thread_id = std::thread::current().id(); let foreground_executor = cx.foreground_executor(); let background_executor = cx.background_executor(); - let telemetry_settings = *client::TelemetrySettings::get_global(cx); - - // Initialize SIGUSR2 handler to send a backtrace to a channel. - let (backtrace_tx, backtrace_rx) = mpsc::channel(); - static BACKTRACE: Mutex> = Mutex::new(Vec::new()); - static BACKTRACE_SENDER: OnceLock> = OnceLock::new(); - BACKTRACE_SENDER.get_or_init(|| backtrace_tx); - BACKTRACE.lock().reserve(100); - - fn handle_backtrace_signal() { - unsafe { - extern "C" fn handle_sigusr2(_i: c_int) { - unsafe { - // ASYNC SIGNAL SAFETY: This lock is only accessed one other time, - // which can only be triggered by This signal handler. In addition, - // this signal handler is immediately removed by SA_RESETHAND, and this - // signal handler cannot be re-entrant due to the SIGUSR2 mask defined - // below - let mut bt = BACKTRACE.lock(); - bt.clear(); - backtrace::trace_unsynchronized(|frame| { - if bt.len() < bt.capacity() { - bt.push(frame.clone()); - true - } else { - false - } - }); - } - - BACKTRACE_SENDER.get().unwrap().send(()).ok(); - } - - let mut mask = SigSet::empty(); - mask.add(SIGUSR2); - sigaction( - Signal::SIGUSR2, - &SigAction::new( - SigHandler::Handler(handle_sigusr2), - SaFlags::SA_RESTART | SaFlags::SA_RESETHAND, - mask, - ), - ) - .log_err(); - } - } - - handle_backtrace_signal(); - let main_thread = pthread::pthread_self(); + // 3 seconds hang let (mut tx, mut rx) = futures::channel::mpsc::channel(3); foreground_executor .spawn(async move { while (rx.next().await).is_some() {} }) @@ -162,120 +79,79 @@ pub fn monitor_main_thread_hangs( .spawn({ let background_executor = background_executor.clone(); async move { + let mut hang_time = None; + + let mut hanging = false; loop { background_executor.timer(Duration::from_secs(1)).await; match tx.try_send(()) { - Ok(_) => continue, + Ok(_) => { + hang_time = None; + hanging = false; + continue; + } Err(e) => { - if e.into_send_error().is_full() { - pthread::pthread_kill(main_thread, SIGUSR2).log_err(); + let is_full = e.into_send_error().is_full(); + if is_full && !hanging { + hanging = true; + hang_time = Some(chrono::Local::now()); + } + + if is_full { + save_hang_trace( + main_thread_id, + &background_executor, + hang_time.unwrap(), + ); } - // Only detect the first hang - break; } } } } }) .detach(); +} - let app_version = release_channel::AppVersion::global(cx); - let os_name = client::telemetry::os_name(); - - background_executor - .clone() - .spawn(async move { - let os_version = client::telemetry::os_version(); - - loop { - while backtrace_rx.recv().is_ok() { - if !telemetry_settings.diagnostics { - return; - } - - // ASYNC SIGNAL SAFETY: This lock is only accessed _after_ - // the backtrace transmitter has fired, which itself is only done - // by the signal handler. 
And due to SA_RESETHAND the signal handler - // will not run again until `handle_backtrace_signal` is called. - let raw_backtrace = BACKTRACE.lock().drain(..).collect::>(); - let backtrace: Vec<_> = raw_backtrace - .into_iter() - .map(|frame| { - let mut btf = BacktraceFrame { - ip: frame.ip() as usize, - symbol_addr: frame.symbol_address() as usize, - base: frame.module_base_address().map(|addr| addr as usize), - symbols: vec![], - }; - - backtrace::resolve_frame(&frame, |symbol| { - if let Some(name) = symbol.name() { - btf.symbols.push(name.to_string()); - } - }); - - btf - }) - .collect(); - - // IMPORTANT: Don't move this to before `BACKTRACE.lock()` - handle_backtrace_signal(); - - log::error!( - "Suspected hang on main thread:\n{}", - backtrace - .iter() - .flat_map(|bt| bt.symbols.first().as_ref().map(|s| s.as_str())) - .collect::>() - .join("\n") - ); - - let report = HangReport { - backtrace, - app_version: Some(app_version), - os_name: os_name.clone(), - os_version: Some(os_version.clone()), - architecture: std::env::consts::ARCH.into(), - installation_id: installation_id.clone(), - }; +fn save_hang_trace( + main_thread_id: ThreadId, + background_executor: &gpui::BackgroundExecutor, + hang_time: chrono::DateTime, +) { + let thread_timings = background_executor.dispatcher.get_all_timings(); + let thread_timings = thread_timings + .into_iter() + .map(|mut timings| { + if timings.thread_id == main_thread_id { + timings.thread_name = Some("main".to_string()); + } - let Some(json_bytes) = serde_json::to_vec(&report).log_err() else { - continue; - }; + SerializedThreadTaskTimings::convert(*STARTUP_TIME.get().unwrap(), timings) + }) + .collect::>(); - let Some(checksum) = client::telemetry::calculate_json_checksum(&json_bytes) - else { - continue; - }; + let trace_path = paths::hang_traces_dir().join(&format!( + "hang-{}.miniprof", + hang_time.format("%Y-%m-%d_%H-%M-%S") + )); - let Ok(url) = http_client.build_zed_api_url("/telemetry/hangs", &[]) else { - continue; - }; + let Some(timings) = serde_json::to_string(&thread_timings) + .context("hang timings serialization") + .log_err() + else { + return; + }; - let Ok(request) = http_client::Request::builder() - .method(Method::POST) - .uri(url.as_ref()) - .header("x-zed-checksum", checksum) - .body(json_bytes.into()) - else { - continue; - }; + std::fs::write(&trace_path, timings) + .context("hang trace file writing") + .log_err(); - if let Some(response) = http_client.send(request).await.log_err() - && response.status() != 200 - { - log::error!("Failed to send hang report: HTTP {:?}", response.status()); - } - } - } - }) - .detach() + info!( + "hang detected, trace file saved at: {}", + trace_path.display() + ); } -pub async fn upload_previous_minidumps( - http: Arc, - installation_id: Option, -) -> anyhow::Result<()> { +pub async fn upload_previous_minidumps(client: Arc) -> anyhow::Result<()> { let Some(minidump_endpoint) = MINIDUMP_ENDPOINT.as_ref() else { log::warn!("Minidump endpoint not set"); return Ok(()); @@ -292,13 +168,12 @@ pub async fn upload_previous_minidumps( json_path.set_extension("json"); if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?) 
&& upload_minidump( - http.clone(), + client.clone(), minidump_endpoint, smol::fs::read(&child_path) .await .context("Failed to read minidump")?, &metadata, - installation_id.clone(), ) .await .log_err() @@ -312,11 +187,10 @@ pub async fn upload_previous_minidumps( } async fn upload_minidump( - http: Arc, + client: Arc, endpoint: &str, minidump: Vec, metadata: &crashes::CrashInfo, - installation_id: Option, ) -> Result<()> { let mut form = Form::new() .part( @@ -343,8 +217,19 @@ async fn upload_minidump( if let Some(minidump_error) = metadata.minidump_error.clone() { form = form.text("minidump_error", minidump_error); } - if let Some(id) = installation_id.clone() { - form = form.text("sentry[user][id]", id) + + if let Some(id) = client.telemetry().metrics_id() { + form = form.text("sentry[user][id]", id.to_string()); + form = form.text( + "sentry[user][is_staff]", + if client.telemetry().is_staff().unwrap_or_default() { + "true" + } else { + "false" + }, + ); + } else if let Some(id) = client.telemetry().installation_id() { + form = form.text("sentry[user][id]", format!("installation-{}", id)) } ::telemetry::event!( @@ -411,8 +296,14 @@ async fn upload_minidump( // TODO: feature-flag-context, and more of device-context like screen resolution, available ram, device model, etc + let stream = form + .into_stream() + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) + .into_async_read(); + let body = AsyncBody::from_reader(stream); + let req = Request::builder().uri(endpoint).body(body)?; let mut response_text = String::new(); - let mut response = http.send_multipart_form(endpoint, form).await?; + let mut response = client.http_client().send(req).await?; response .body_mut() .read_to_string(&mut response_text) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 7cdaea920f4b90de4393dd08e0c855ecd1cb2f88..d088df00839814e32a9c246a3486ac5ad5ca4b9e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -10,6 +10,7 @@ mod quick_action_bar; pub(crate) mod windows_only_instance; use agent_ui::{AgentDiffToolbar, AgentPanelDelegate}; +use agent_ui_v2::agents_panel::AgentsPanel; use anyhow::Context as _; pub use app_menus::*; use assets::Assets; @@ -22,16 +23,17 @@ use editor::{Editor, MultiBuffer}; use extension_host::ExtensionStore; use feature_flags::{FeatureFlagAppExt, PanicFeatureFlag}; use fs::Fs; +use futures::FutureExt as _; use futures::future::Either; use futures::{StreamExt, channel::mpsc, select_biased}; use git_ui::commit_view::CommitViewToolbar; use git_ui::git_panel::GitPanel; use git_ui::project_diff::ProjectDiffToolbar; use gpui::{ - Action, App, AppContext as _, Context, DismissEvent, Element, Entity, Focusable, KeyBinding, - ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, Task, - TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, point, - px, retain_all, + Action, App, AppContext as _, AsyncWindowContext, Context, DismissEvent, Element, Entity, + Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, + Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowKind, WindowOptions, actions, + image_cache, point, px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -53,12 +55,12 @@ use project_panel::ProjectPanel; use prompt_store::PromptBuilder; use quick_action_bar::QuickActionBar; use recent_projects::open_remote_project; -use release_channel::{AppCommitSha, ReleaseChannel}; +use release_channel::{AppCommitSha, 
AppVersion, ReleaseChannel}; use rope::Rope; use search::project_search::ProjectSearchBar; use settings::{ BaseKeymap, DEFAULT_KEYMAP_PATH, InvalidSettingsError, KeybindSource, KeymapFile, - KeymapFileLoadResult, Settings, SettingsStore, VIM_KEYMAP_PATH, + KeymapFileLoadResult, MigrationStatus, Settings, SettingsStore, VIM_KEYMAP_PATH, initial_local_debug_tasks_content, initial_project_settings_content, initial_tasks_content, update_settings_file, }; @@ -80,8 +82,9 @@ use vim_mode_setting::VimModeSetting; use workspace::notifications::{ NotificationId, SuppressEvent, dismiss_app_notification, show_app_notification, }; +use workspace::utility_pane::utility_slot_for_dock_position; use workspace::{ - AppState, NewFile, NewWindow, OpenLog, Toast, Workspace, WorkspaceSettings, + AppState, NewFile, NewWindow, OpenLog, Panel, Toast, Workspace, WorkspaceSettings, create_and_open_local_file, notifications::simple_message_notification::MessageNotification, open_new, }; @@ -107,8 +110,8 @@ actions!( Minimize, /// Opens the default settings file. OpenDefaultSettings, - /// Opens project-specific settings. - OpenProjectSettings, + /// Opens project-specific settings file. + OpenProjectSettingsFile, /// Opens the project tasks configuration. OpenProjectTasks, /// Opens the tasks panel. @@ -158,15 +161,15 @@ pub fn init(cx: &mut App) { || flag.await { cx.update(|cx| { - cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action")); - cx.on_action(|_: &TestCrash, _| { - unsafe extern "C" { - fn puts(s: *const i8); - } - unsafe { - puts(0xabad1d3a as *const i8); - } - }); + cx.on_action(|_: &TestPanic, _| panic!("Ran the TestPanic action")) + .on_action(|_: &TestCrash, _| { + unsafe extern "C" { + fn puts(s: *const i8); + } + unsafe { + puts(0xabad1d3a as *const i8); + } + }); }) .ok(); }; @@ -176,11 +179,11 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |workspace, window, cx| { open_log_file(workspace, window, cx); }); - }); - cx.on_action(|_: &workspace::RevealLogInFileManager, cx| { + }) + .on_action(|_: &workspace::RevealLogInFileManager, cx| { cx.reveal_path(paths::log_file().as_path()); - }); - cx.on_action(|_: &zed_actions::OpenLicenses, cx| { + }) + .on_action(|_: &zed_actions::OpenLicenses, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { open_bundled_file( workspace, @@ -191,13 +194,13 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &zed_actions::OpenTelemetryLog, cx| { + }) + .on_action(|_: &zed_actions::OpenTelemetryLog, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { open_telemetry_log_file(workspace, window, cx); }); - }); - cx.on_action(|&zed_actions::OpenKeymapFile, cx| { + }) + .on_action(|&zed_actions::OpenKeymapFile, cx| { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::keymap_file(), @@ -206,8 +209,8 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &OpenSettingsFile, cx| { + }) + .on_action(|_: &OpenSettingsFile, cx| { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::settings_file(), @@ -216,13 +219,13 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &OpenAccountSettings, cx| { + }) + .on_action(|_: &OpenAccountSettings, cx| { with_active_or_new_workspace(cx, |_, _, cx| { cx.open_url(&zed_urls::account_url(cx)); }); - }); - cx.on_action(|_: &OpenTasks, cx| { + }) + .on_action(|_: &OpenTasks, cx| { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::tasks_file(), @@ -231,8 +234,8 @@ pub fn 
init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &OpenDebugTasks, cx| { + }) + .on_action(|_: &OpenDebugTasks, cx| { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::debug_scenarios_file(), @@ -241,8 +244,8 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &OpenDefaultSettings, cx| { + }) + .on_action(|_: &OpenDefaultSettings, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { open_bundled_file( workspace, @@ -253,8 +256,8 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &zed_actions::OpenDefaultKeymap, cx| { + }) + .on_action(|_: &zed_actions::OpenDefaultKeymap, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { open_bundled_file( workspace, @@ -265,8 +268,8 @@ pub fn init(cx: &mut App) { cx, ); }); - }); - cx.on_action(|_: &zed_actions::About, cx| { + }) + .on_action(|_: &zed_actions::About, cx| { with_active_or_new_workspace(cx, |workspace, window, cx| { about(workspace, window, cx); }); @@ -307,7 +310,10 @@ pub fn build_window_options(display_uuid: Option, cx: &mut App) -> WindowO let window_decorations = match std::env::var("ZED_WINDOW_DECORATIONS") { Ok(val) if val == "server" => gpui::WindowDecorations::Server, Ok(val) if val == "client" => gpui::WindowDecorations::Client, - _ => gpui::WindowDecorations::Client, + _ => match WorkspaceSettings::get_global(cx).window_decorations { + settings::WindowDecorations::Server => gpui::WindowDecorations::Server, + settings::WindowDecorations::Client => gpui::WindowDecorations::Client, + }, }; let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs; @@ -347,6 +353,8 @@ pub fn initialize_workspace( ) { let mut _on_close_subscription = bind_on_window_closed(cx); cx.observe_global::(move |cx| { + // A 1.92 regression causes unused-assignment to trigger on this variable. 
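Earlier in this hunk, `build_window_options` stops hard-coding client-side decorations when `ZED_WINDOW_DECORATIONS` is unset and instead falls back to the `window_decorations` setting. A tiny sketch of that env-var-then-settings fallback; the `Decorations` and `Settings` types below are local stand-ins, not GPUI's or Zed's.

```rust
// Env var wins when set to a recognized value; otherwise defer to settings.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Decorations {
    Server,
    Client,
}

struct Settings {
    window_decorations: Decorations,
}

fn pick_decorations(settings: &Settings) -> Decorations {
    match std::env::var("ZED_WINDOW_DECORATIONS").as_deref() {
        Ok("server") => Decorations::Server,
        Ok("client") => Decorations::Client,
        // Previously this arm always returned Client; it now respects settings.
        _ => settings.window_decorations,
    }
}

fn main() {
    let settings = Settings {
        window_decorations: Decorations::Server,
    };
    println!("{:?}", pick_decorations(&settings));
}
```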
+ _ = _on_close_subscription.is_some(); _on_close_subscription = bind_on_window_closed(cx); }) .detach(); @@ -393,9 +401,12 @@ pub fn initialize_workspace( } } + #[cfg(target_os = "windows")] + unstable_version_notification(cx); + let edit_prediction_menu_handle = PopoverMenuHandle::default(); - let edit_prediction_button = cx.new(|cx| { - edit_prediction_button::EditPredictionButton::new( + let edit_prediction_ui = cx.new(|cx| { + edit_prediction_ui::EditPredictionButton::new( app_state.fs.clone(), app_state.user_store.clone(), edit_prediction_menu_handle.clone(), @@ -404,7 +415,7 @@ pub fn initialize_workspace( ) }); workspace.register_action({ - move |_, _: &edit_prediction_button::ToggleMenu, window, cx| { + move |_, _: &edit_prediction_ui::ToggleMenu, window, cx| { edit_prediction_menu_handle.toggle(window, cx); } }); @@ -443,7 +454,7 @@ pub fn initialize_workspace( status_bar.add_left_item(lsp_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); status_bar.add_left_item(activity_indicator, window, cx); - status_bar.add_right_item(edit_prediction_button, window, cx); + status_bar.add_right_item(edit_prediction_ui, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); status_bar.add_right_item(active_toolchain_language, window, cx); status_bar.add_right_item(line_ending_indicator, window, cx); @@ -466,11 +477,58 @@ pub fn initialize_workspace( initialize_panels(prompt_builder.clone(), window, cx); register_actions(app_state.clone(), workspace, window, cx); - workspace.focus_handle(cx).focus(window); + workspace.focus_handle(cx).focus(window, cx); }) .detach(); } +#[cfg(target_os = "windows")] +fn unstable_version_notification(cx: &mut App) { + if !matches!( + ReleaseChannel::try_global(cx), + Some(ReleaseChannel::Nightly) + ) { + return; + } + let db_key = "zed_windows_nightly_notif_shown_at".to_owned(); + let time = chrono::Utc::now(); + if let Some(last_shown) = db::kvp::KEY_VALUE_STORE + .read_kvp(&db_key) + .log_err() + .flatten() + .and_then(|timestamp| chrono::DateTime::parse_from_rfc3339(×tamp).ok()) + { + if time.fixed_offset() - last_shown < chrono::Duration::days(7) { + return; + } + } + cx.spawn(async move |_| { + db::kvp::KEY_VALUE_STORE + .write_kvp(db_key, time.to_rfc3339()) + .await + }) + .detach_and_log_err(cx); + struct WindowsNightly; + show_app_notification(NotificationId::unique::(), cx, |cx| { + cx.new(|cx| { + MessageNotification::new("You're using an unstable version of Zed (Nightly)", cx) + .primary_message("Download Stable") + .primary_icon_color(Color::Accent) + .primary_icon(IconName::Download) + .primary_on_click(|window, cx| { + window.dispatch_action( + zed_actions::OpenBrowser { + url: "https://zed.dev/download".to_string(), + } + .boxed_clone(), + cx, + ); + cx.emit(DismissEvent); + }) + }) + }); +} + #[cfg(any(target_os = "linux", target_os = "freebsd"))] fn initialize_file_watcher(window: &mut Window, cx: &mut Context) { if let Err(e) = fs::fs_watcher::global(|_| {}) { @@ -602,105 +660,143 @@ fn initialize_panels( ); let debug_panel = DebugPanel::load(workspace_handle.clone(), cx); - let ( - project_panel, - outline_panel, - terminal_panel, - git_panel, - channels_panel, - notification_panel, - debug_panel, - ) = futures::try_join!( - project_panel, - outline_panel, - git_panel, - terminal_panel, - channels_panel, - notification_panel, - debug_panel, - )?; - - workspace_handle.update_in(cx, |workspace, window, cx| { - workspace.add_panel(project_panel, window, cx); - workspace.add_panel(outline_panel, 
window, cx); - workspace.add_panel(terminal_panel, window, cx); - workspace.add_panel(git_panel, window, cx); - workspace.add_panel(channels_panel, window, cx); - workspace.add_panel(notification_panel, window, cx); - workspace.add_panel(debug_panel, window, cx); - })?; - - fn setup_or_teardown_agent_panel( - workspace: &mut Workspace, - prompt_builder: Arc, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - let disable_ai = SettingsStore::global(cx) - .get::(None) - .disable_ai - || cfg!(test); - let existing_panel = workspace.panel::(cx); - match (disable_ai, existing_panel) { - (false, None) => cx.spawn_in(window, async move |workspace, cx| { - let panel = - agent_ui::AgentPanel::load(workspace.clone(), prompt_builder, cx.clone()) - .await?; - workspace.update_in(cx, |workspace, window, cx| { - let disable_ai = SettingsStore::global(cx) - .get::(None) - .disable_ai; - let have_panel = workspace.panel::(cx).is_some(); - if !disable_ai && !have_panel { - workspace.add_panel(panel, window, cx); - } + async fn add_panel_when_ready( + panel_task: impl Future>> + 'static, + workspace_handle: WeakEntity, + mut cx: gpui::AsyncWindowContext, + ) { + if let Some(panel) = panel_task.await.context("failed to load panel").log_err() + { + workspace_handle + .update_in(&mut cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); }) - }), - (true, Some(existing_panel)) => { - workspace.remove_panel::(&existing_panel, window, cx); - Task::ready(Ok(())) - } - _ => Task::ready(Ok(())), + .log_err(); } } - workspace_handle - .update_in(cx, |workspace, window, cx| { - setup_or_teardown_agent_panel(workspace, prompt_builder.clone(), window, cx) - })? - .await?; + futures::join!( + add_panel_when_ready(project_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(outline_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(terminal_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(git_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(channels_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(notification_panel, workspace_handle.clone(), cx.clone()), + add_panel_when_ready(debug_panel, workspace_handle.clone(), cx.clone()), + initialize_agent_panel(workspace_handle.clone(), prompt_builder, cx.clone()).map(|r| r.log_err()), + initialize_agents_panel(workspace_handle, cx.clone()).map(|r| r.log_err()) + ); + + anyhow::Ok(()) + }) + .detach(); +} - workspace_handle.update_in(cx, |workspace, window, cx| { - cx.observe_global_in::(window, { - let prompt_builder = prompt_builder.clone(); - move |workspace, window, cx| { - setup_or_teardown_agent_panel(workspace, prompt_builder.clone(), window, cx) - .detach_and_log_err(cx); +fn setup_or_teardown_ai_panel( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + load_panel: impl FnOnce( + WeakEntity, + AsyncWindowContext, + ) -> Task>> + + 'static, +) -> Task> { + let disable_ai = SettingsStore::global(cx) + .get::(None) + .disable_ai + || cfg!(test); + let existing_panel = workspace.panel::
<P>
(cx); + match (disable_ai, existing_panel) { + (false, None) => cx.spawn_in(window, async move |workspace, cx| { + let panel = load_panel(workspace.clone(), cx.clone()).await?; + workspace.update_in(cx, |workspace, window, cx| { + let disable_ai = SettingsStore::global(cx) + .get::(None) + .disable_ai; + let have_panel = workspace.panel::
<P>
(cx).is_some(); + if !disable_ai && !have_panel { + workspace.add_panel(panel, window, cx); } }) - .detach(); + }), + (true, Some(existing_panel)) => { + workspace.remove_panel::
<P>
(&existing_panel, window, cx); + Task::ready(Ok(())) + } + _ => Task::ready(Ok(())), + } +} - // Register the actions that are shared between `assistant` and `assistant2`. - // - // We need to do this here instead of within the individual `init` - // functions so that we only register the actions once. - // - // Once we ship `assistant2` we can push this back down into `agent::agent_panel::init`. - if !cfg!(test) { - ::set_global( - Arc::new(agent_ui::ConcreteAssistantPanelDelegate), - cx, - ); +async fn initialize_agent_panel( + workspace_handle: WeakEntity, + prompt_builder: Arc, + mut cx: AsyncWindowContext, +) -> anyhow::Result<()> { + workspace_handle + .update_in(&mut cx, |workspace, window, cx| { + let prompt_builder = prompt_builder.clone(); + setup_or_teardown_ai_panel(workspace, window, cx, move |workspace, cx| { + agent_ui::AgentPanel::load(workspace, prompt_builder, cx) + }) + })? + .await?; + + workspace_handle.update_in(&mut cx, |workspace, window, cx| { + let prompt_builder = prompt_builder.clone(); + cx.observe_global_in::(window, move |workspace, window, cx| { + let prompt_builder = prompt_builder.clone(); + setup_or_teardown_ai_panel(workspace, window, cx, move |workspace, cx| { + agent_ui::AgentPanel::load(workspace, prompt_builder, cx) + }) + .detach_and_log_err(cx); + }) + .detach(); - workspace - .register_action(agent_ui::AgentPanel::toggle_focus) - .register_action(agent_ui::InlineAssistant::inline_assist); - } - })?; + // Register the actions that are shared between `assistant` and `assistant2`. + // + // We need to do this here instead of within the individual `init` + // functions so that we only register the actions once. + // + // Once we ship `assistant2` we can push this back down into `agent::agent_panel::init`. + if !cfg!(test) { + ::set_global( + Arc::new(agent_ui::ConcreteAssistantPanelDelegate), + cx, + ); - anyhow::Ok(()) - }) - .detach(); + workspace + .register_action(agent_ui::AgentPanel::toggle_focus) + .register_action(agent_ui::InlineAssistant::inline_assist); + } + })?; + + anyhow::Ok(()) +} + +async fn initialize_agents_panel( + workspace_handle: WeakEntity, + mut cx: AsyncWindowContext, +) -> anyhow::Result<()> { + workspace_handle + .update_in(&mut cx, |workspace, window, cx| { + setup_or_teardown_ai_panel(workspace, window, cx, |workspace, cx| { + AgentsPanel::load(workspace, cx) + }) + })? 
+ .await?; + + workspace_handle.update_in(&mut cx, |_workspace, window, cx| { + cx.observe_global_in::(window, move |workspace, window, cx| { + setup_or_teardown_ai_panel(workspace, window, cx, |workspace, cx| { + AgentsPanel::load(workspace, cx) + }) + .detach_and_log_err(cx); + }) + .detach(); + })?; + + anyhow::Ok(()) } fn register_actions( @@ -726,7 +822,24 @@ fn register_actions( ..Default::default() }) }) - .register_action(|_, action: &OpenBrowser, _window, cx| cx.open_url(&action.url)) + .register_action(|workspace, action: &OpenBrowser, _window, cx| { + // Parse and validate the URL to ensure it's properly formatted + match url::Url::parse(&action.url) { + Ok(parsed_url) => { + // Use the parsed URL's string representation which is properly escaped + cx.open_url(parsed_url.as_str()); + } + Err(e) => { + workspace.show_error( + &anyhow::anyhow!( + "Opening this URL in a browser failed because the URL is invalid: {}\n\nError was: {e}", + action.url + ), + cx, + ); + } + } + }) .register_action(|workspace, _: &workspace::Open, window, cx| { telemetry::event!("Project Opened"); let paths = workspace.prompt_for_open_path( @@ -935,7 +1048,7 @@ fn register_actions( .register_action(open_project_debug_tasks_file) .register_action( |workspace: &mut Workspace, - _: &project_panel::ToggleFocus, + _: &zed_actions::project_panel::ToggleFocus, window: &mut Window, cx: &mut Context| { workspace.toggle_panel_focus::(window, cx); @@ -975,6 +1088,18 @@ fn register_actions( workspace.toggle_panel_focus::(window, cx); }, ) + .register_action( + |workspace: &mut Workspace, + _: &zed_actions::agent::ToggleAgentPane, + window: &mut Window, + cx: &mut Context| { + if let Some(panel) = workspace.panel::(cx) { + let position = panel.read(cx).position(window, cx); + let slot = utility_slot_for_dock_position(position); + workspace.toggle_utility_pane(slot, window, cx); + } + }, + ) .register_action({ let app_state = Arc::downgrade(&app_state); move |_, _: &NewWindow, _, cx| { @@ -985,7 +1110,21 @@ fn register_actions( cx, |workspace, window, cx| { cx.activate(true); - Editor::new_file(workspace, &Default::default(), window, cx) + // Create buffer synchronously to avoid flicker + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer("", None, true, cx) + }); + let editor = cx.new(|cx| { + Editor::for_buffer(buffer, Some(project), window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(editor), + None, + true, + window, + cx, + ); }, ) .detach(); @@ -1087,7 +1226,7 @@ fn initialize_pane( toolbar.add_item(migration_banner, window, cx); let project_diff_toolbar = cx.new(|cx| ProjectDiffToolbar::new(workspace, cx)); toolbar.add_item(project_diff_toolbar, window, cx); - let commit_view_toolbar = cx.new(|cx| CommitViewToolbar::new(workspace, cx)); + let commit_view_toolbar = cx.new(|_| CommitViewToolbar::new()); toolbar.add_item(commit_view_toolbar, window, cx); let agent_diff_toolbar = cx.new(AgentDiffToolbar::new); toolbar.add_item(agent_diff_toolbar, window, cx); @@ -1098,7 +1237,9 @@ fn initialize_pane( } fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context) { + use std::fmt::Write; let release_channel = ReleaseChannel::global(cx).display_name(); + let full_version = AppVersion::global(cx); let version = env!("CARGO_PKG_VERSION"); let debug = if cfg!(debug_assertions) { "(debug)" @@ -1106,7 +1247,16 @@ fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context) { "" }; let message = format!("{release_channel} 
{version} {debug}"); - let detail = AppCommitSha::try_global(cx).map(|sha| sha.full()); + + let mut detail = AppCommitSha::try_global(cx) + .map(|sha| sha.full()) + .unwrap_or_default(); + if !detail.is_empty() { + detail.push('\n'); + } + _ = write!(&mut detail, "\n{full_version}"); + + let detail = Some(detail); let prompt = window.prompt( PromptLevel::Info, @@ -1296,44 +1446,62 @@ fn open_log_file(workspace: &mut Workspace, window: &mut Window, cx: &mut Contex .detach(); } -pub fn handle_settings_file_changes( - mut user_settings_file_rx: mpsc::UnboundedReceiver, - mut global_settings_file_rx: mpsc::UnboundedReceiver, - cx: &mut App, - settings_changed: impl Fn(Option, &mut App) + 'static, -) { - MigrationNotification::set_global(cx.new(|_| MigrationNotification), cx); +fn notify_settings_errors(result: settings::SettingsParseResult, is_user: bool, cx: &mut App) { + if let settings::ParseStatus::Failed { error: err } = &result.parse_status { + let settings_type = if is_user { "user" } else { "global" }; + log::error!("Failed to load {} settings: {err}", settings_type); + } - // Helper function to process settings content - let process_settings = move |content: String, - is_user: bool, - store: &mut SettingsStore, - cx: &mut App| - -> bool { - let result = if is_user { - store.set_user_settings(&content, cx) - } else { - store.set_global_settings(&content, cx) - }; + let error = match result.parse_status { + settings::ParseStatus::Failed { error } => Some(anyhow::format_err!(error)), + settings::ParseStatus::Success => None, + }; + let id = NotificationId::Named(format!("failed-to-parse-settings-{is_user}").into()); - let id = NotificationId::Named("failed-to-migrate-settings".into()); - // Apply migrations to both user and global settings - let content_migrated = match result.migration_status { - settings::MigrationStatus::Succeeded => { - dismiss_app_notification(&id, cx); - true - } - settings::MigrationStatus::NotNeeded => { - dismiss_app_notification(&id, cx); + let showed_parse_error = match error { + Some(error) => { + if let Some(InvalidSettingsError::LocalSettings { .. 
}) = + error.downcast_ref::() + { false + // Local settings errors are displayed by the projects + } else { + show_app_notification(id, cx, move |cx| { + cx.new(|cx| { + MessageNotification::new(format!("Invalid user settings file\n{error}"), cx) + .primary_message("Open Settings File") + .primary_icon(IconName::Settings) + .primary_on_click(|window, cx| { + window.dispatch_action( + zed_actions::OpenSettingsFile.boxed_clone(), + cx, + ); + cx.emit(DismissEvent); + }) + }) + }); + true } - settings::MigrationStatus::Failed { error: err } => { + } + None => { + dismiss_app_notification(&id, cx); + false + } + }; + let id = NotificationId::Named(format!("failed-to-migrate-settings-{is_user}").into()); + + match result.migration_status { + settings::MigrationStatus::Succeeded | settings::MigrationStatus::NotNeeded => { + dismiss_app_notification(&id, cx); + } + settings::MigrationStatus::Failed { error: err } => { + if !showed_parse_error { show_app_notification(id, cx, move |cx| { cx.new(|cx| { MessageNotification::new( format!( "Failed to migrate settings\n\ - {err}" + {err}" ), cx, ) @@ -1345,26 +1513,17 @@ pub fn handle_settings_file_changes( }) }) }); - // notify user here - false } - }; - - if let settings::ParseStatus::Failed { error: err } = &result.parse_status { - let settings_type = if is_user { "user" } else { "global" }; - log::error!("Failed to load {} settings: {err}", settings_type); } - - settings_changed( - match result.parse_status { - settings::ParseStatus::Failed { error } => Some(anyhow::format_err!(error)), - settings::ParseStatus::Success => None, - }, - cx, - ); - - content_migrated }; +} + +pub fn handle_settings_file_changes( + mut user_settings_file_rx: mpsc::UnboundedReceiver, + mut global_settings_file_rx: mpsc::UnboundedReceiver, + cx: &mut App, +) { + MigrationNotification::set_global(cx.new(|_| MigrationNotification), cx); // Initial load of both settings files let global_content = cx @@ -1377,8 +1536,8 @@ pub fn handle_settings_file_changes( .unwrap(); SettingsStore::update_global(cx, |store, cx| { - process_settings(global_content, false, store, cx); - process_settings(user_content, true, store, cx); + notify_settings_errors(store.set_user_settings(&user_content, cx), true, cx); + notify_settings_errors(store.set_global_settings(&global_content, cx), false, cx); }); // Watch for changes in both files @@ -1395,7 +1554,14 @@ pub fn handle_settings_file_changes( }; let result = cx.update_global(|store: &mut SettingsStore, cx| { - let migrating_in_memory = process_settings(content, is_user, store, cx); + let result = if is_user { + store.set_user_settings(&content, cx) + } else { + store.set_global_settings(&content, cx) + }; + let migrating_in_memory = + matches!(&result.migration_status, MigrationStatus::Succeeded); + notify_settings_errors(result, is_user, cx); if let Some(notifier) = MigrationNotification::try_global(cx) { notifier.update(cx, |_, cx| { cx.emit(MigrationEvent::ContentChanged { @@ -1539,6 +1705,7 @@ fn show_keymap_file_json_error( cx.new(|cx| { MessageNotification::new(message.clone(), cx) .primary_message("Open Keymap File") + .primary_icon(IconName::Settings) .primary_on_click(|window, cx| { window.dispatch_action(zed_actions::OpenKeymapFile.boxed_clone(), cx); cx.emit(DismissEvent); @@ -1597,16 +1764,18 @@ fn show_markdown_app_notification( cx.new(move |cx| { MessageNotification::new_from_builder(cx, move |window, cx| { image_cache(retain_all("notification-cache")) - .text_xs() - 
.child(markdown_preview::markdown_renderer::render_parsed_markdown( - &parsed_markdown.clone(), - Some(workspace_handle.clone()), - window, - cx, + .child(div().text_ui(cx).child( + markdown_preview::markdown_renderer::render_parsed_markdown( + &parsed_markdown.clone(), + Some(workspace_handle.clone()), + window, + cx, + ), )) .into_any() }) .primary_message(primary_button_message) + .primary_icon(IconName::Settings) .primary_on_click_arc(primary_button_on_click) }) }) @@ -1658,36 +1827,6 @@ pub fn load_default_keymap(cx: &mut App) { } } -pub fn handle_settings_changed(error: Option, cx: &mut App) { - struct SettingsParseErrorNotification; - let id = NotificationId::unique::(); - - match error { - Some(error) => { - if let Some(InvalidSettingsError::LocalSettings { .. }) = - error.downcast_ref::() - { - // Local settings errors are displayed by the projects - return; - } - show_app_notification(id, cx, move |cx| { - cx.new(|cx| { - MessageNotification::new(format!("Invalid user settings file\n{error}"), cx) - .primary_message("Open Settings File") - .primary_icon(IconName::Settings) - .primary_on_click(|window, cx| { - window.dispatch_action(zed_actions::OpenSettingsFile.boxed_clone(), cx); - cx.emit(DismissEvent); - }) - }) - }); - } - None => { - dismiss_app_notification(&id, cx); - } - } -} - pub fn open_new_ssh_project_from_project( workspace: &mut Workspace, paths: Vec, @@ -1716,7 +1855,7 @@ pub fn open_new_ssh_project_from_project( fn open_project_settings_file( workspace: &mut Workspace, - _: &OpenProjectSettings, + _: &OpenProjectSettingsFile, window: &mut Window, cx: &mut Context, ) { @@ -1851,53 +1990,67 @@ fn open_telemetry_log_file( window: &mut Window, cx: &mut Context, ) { - workspace.with_local_workspace(window, cx, move |workspace, window, cx| { - let app_state = workspace.app_state().clone(); - cx.spawn_in(window, async move |workspace, cx| { - async fn fetch_log_string(app_state: &Arc) -> Option { - let path = client::telemetry::Telemetry::log_file_path(); - app_state.fs.load(&path).await.log_err() - } + const HEADER: &str = concat!( + "// Zed collects anonymous usage data to help us understand how people are using the app.\n", + "// Telemetry can be disabled via the `settings.json` file.\n", + "// Here is the data that has been reported for the current session:\n", + ); + workspace + .with_local_workspace(window, cx, move |workspace, window, cx| { + let app_state = workspace.app_state().clone(); + cx.spawn_in(window, async move |workspace, cx| { + async fn fetch_log_string(app_state: &Arc) -> Option { + let path = client::telemetry::Telemetry::log_file_path(); + app_state.fs.load(&path).await.log_err() + } - let log = fetch_log_string(&app_state).await.unwrap_or_else(|| "// No data has been collected yet".to_string()); + let log = fetch_log_string(&app_state) + .await + .unwrap_or_else(|| "// No data has been collected yet".to_string()); - const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024; - let mut start_offset = log.len().saturating_sub(MAX_TELEMETRY_LOG_LEN); - if let Some(newline_offset) = log[start_offset..].find('\n') { - start_offset += newline_offset + 1; - } - let log_suffix = &log[start_offset..]; - let header = concat!( - "// Zed collects anonymous usage data to help us understand how people are using the app.\n", - "// Telemetry can be disabled via the `settings.json` file.\n", - "// Here is the data that has been reported for the current session:\n", - ); - let content = format!("{}\n{}", header, log_suffix); - let json = 
app_state.languages.language_for_name("JSON").await.log_err(); - - workspace.update_in( cx, |workspace, window, cx| { - let project = workspace.project().clone(); - let buffer = project.update(cx, |project, cx| project.create_local_buffer(&content, json,false, cx)); - let buffer = cx.new(|cx| { - MultiBuffer::singleton(buffer, cx).with_title("Telemetry Log".into()) - }); - workspace.add_item_to_active_pane( - Box::new(cx.new(|cx| { - let mut editor = Editor::for_multibuffer(buffer, Some(project), window, cx); - editor.set_read_only(true); - editor.set_breadcrumb_header("Telemetry Log".into()); - editor - })), - None, - true, - window, cx, - ); - }).log_err()?; + const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024; + let mut start_offset = log.len().saturating_sub(MAX_TELEMETRY_LOG_LEN); + if let Some(newline_offset) = log[start_offset..].find('\n') { + start_offset += newline_offset + 1; + } + let log_suffix = &log[start_offset..]; + let content = format!("{}\n{}", HEADER, log_suffix); + let json = app_state + .languages + .language_for_name("JSON") + .await + .log_err(); - Some(()) + workspace + .update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer(&content, json, false, cx) + }); + let buffer = cx.new(|cx| { + MultiBuffer::singleton(buffer, cx).with_title("Telemetry Log".into()) + }); + workspace.add_item_to_active_pane( + Box::new(cx.new(|cx| { + let mut editor = + Editor::for_multibuffer(buffer, Some(project), window, cx); + editor.set_read_only(true); + editor.set_breadcrumb_header("Telemetry Log".into()); + editor + })), + None, + true, + window, + cx, + ); + }) + .log_err()?; + + Some(()) + }) + .detach(); }) .detach(); - }).detach(); } fn open_bundled_file( @@ -2160,16 +2313,20 @@ mod tests { use super::*; use assets::Assets; use collections::HashSet; - use editor::{DisplayPoint, Editor, SelectionEffects, display_map::DisplayRow}; + use editor::{ + DisplayPoint, Editor, MultiBufferOffset, SelectionEffects, display_map::DisplayRow, + }; use gpui::{ - Action, AnyWindowHandle, App, AssetSource, BorrowAppContext, SemanticVersion, - TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle, actions, + Action, AnyWindowHandle, App, AssetSource, BorrowAppContext, TestAppContext, UpdateGlobal, + VisualTestContext, WindowHandle, actions, }; - use language::{LanguageMatcher, LanguageRegistry}; + use language::LanguageRegistry; + use languages::{markdown_lang, rust_lang}; use pretty_assertions::{assert_eq, assert_ne}; use project::{Project, ProjectPath}; + use semver::Version; use serde_json::json; - use settings::{SettingsStore, watch_config_file}; + use settings::{SaturatingBool, SettingsStore, watch_config_file}; use std::{ path::{Path, PathBuf}, time::Duration, @@ -2804,9 +2961,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window.root(cx).unwrap(); @@ -3236,9 +3391,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); 
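The telemetry-log viewer in this hunk keeps at most `MAX_TELEMETRY_LOG_LEN` bytes from the end of the log and then advances past the next newline so the buffer starts on a whole line. A standalone sketch of that truncation follows; the helper name is invented, it adds a small guard so an un-truncated log is returned whole, and it assumes ASCII content since the initial byte offset is not checked for a UTF-8 character boundary.

```rust
// Tail `log` to at most `max_len` bytes, snapping the start to a line boundary.
fn tail_on_line_boundary(log: &str, max_len: usize) -> &str {
    let mut start = log.len().saturating_sub(max_len);
    if start > 0 {
        // We clipped into the middle of a line; skip ahead to the next full line.
        if let Some(newline) = log[start..].find('\n') {
            start += newline + 1;
        }
    }
    &log[start..]
}

fn main() {
    let log = "line one\nline two\nline three\n";
    // A small budget clips into "line two", so the tail starts at "line three".
    assert_eq!(tail_on_line_boundary(log, 15), "line three\n");
    // A large budget keeps everything.
    assert_eq!(tail_on_line_boundary(log, 1024), log);
    println!("ok");
}
```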
let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window.root(cx).unwrap(); @@ -3330,9 +3483,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window.root(cx).unwrap(); @@ -3403,7 +3554,7 @@ mod tests { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; project.update(cx, |project, _| { - project.languages().add(markdown_language()); + project.languages().add(markdown_lang()); project.languages().add(rust_lang()); }); let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); @@ -3427,7 +3578,11 @@ mod tests { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "untitled"); assert!(Arc::ptr_eq( - &editor.buffer().read(cx).language_at(0, cx).unwrap(), + &editor + .buffer() + .read(cx) + .language_at(MultiBufferOffset(0), cx) + .unwrap(), &languages::PLAIN_TEXT )); editor.handle_input("hi", window, cx); @@ -3461,7 +3616,12 @@ mod tests { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name.rs"); assert_eq!( - editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + editor + .buffer() + .read(cx) + .language_at(MultiBufferOffset(0), cx) + .unwrap() + .name(), "Rust".into() ); }); @@ -3547,8 +3707,8 @@ mod tests { let project = Project::test(app_state.fs.clone(), [], cx).await; project.update(cx, |project, _| { - project.languages().add(rust_lang()); - project.languages().add(markdown_language()); + project.languages().add(language::rust_lang()); + project.languages().add(language::markdown_lang()); }); let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); @@ -3567,7 +3727,11 @@ mod tests { .update(cx, |_, window, cx| { editor.update(cx, |editor, cx| { assert!(Arc::ptr_eq( - &editor.buffer().read(cx).language_at(0, cx).unwrap(), + &editor + .buffer() + .read(cx) + .language_at(MultiBufferOffset(0), cx) + .unwrap(), &languages::PLAIN_TEXT )); editor.handle_input("hi", window, cx); @@ -3591,7 +3755,12 @@ mod tests { editor.update(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!( - editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + editor + .buffer() + .read(cx) + .language_at(MultiBufferOffset(0), cx) + .unwrap() + .name(), "Rust".into() ) }); @@ -3618,9 +3787,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); let window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let workspace = window.root(cx).unwrap(); @@ -3722,9 +3889,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let pane = workspace @@ -4116,9 +4281,7 @@ mod tests { .await; let project = Project::test(app_state.fs.clone(), 
[path!("/root").as_ref()], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(markdown_language()) - }); + project.update(cx, |project, _cx| project.languages().add(markdown_lang())); let workspace = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); let pane = workspace .read_with(cx, |workspace, _| workspace.active_pane().clone()) @@ -4396,7 +4559,7 @@ mod tests { app_state.fs.clone(), PathBuf::from("/global_settings.json"), ); - handle_settings_file_changes(settings_rx, global_settings_rx, cx, |_, _| {}); + handle_settings_file_changes(settings_rx, global_settings_rx, cx); handle_keymap_file_changes(keymap_rx, cx); }); workspace @@ -4514,7 +4677,7 @@ mod tests { app_state.fs.clone(), PathBuf::from("/global_settings.json"), ); - handle_settings_file_changes(settings_rx, global_settings_rx, cx, |_, _| {}); + handle_settings_file_changes(settings_rx, global_settings_rx, cx); handle_keymap_file_changes(keymap_rx, cx); }); @@ -4616,11 +4779,14 @@ mod tests { "action", "activity_indicator", "agent", + "agents", #[cfg(not(target_os = "macos"))] "app_menu", "assistant", "assistant2", "auto_update", + "branch_picker", + "bedrock", "branches", "buffer_search", "channel_modal", @@ -4645,10 +4811,12 @@ mod tests { "git_panel", "go_to_line", "icon_theme_selector", + "inline_assistant", "journal", "keymap_editor", "keystroke_input", "language_selector", + "welcome", "line_ending_selector", "lsp_tool", "markdown", @@ -4803,7 +4971,7 @@ mod tests { let state = Arc::get_mut(&mut app_state).unwrap(); state.build_window_options = build_window_options; - app_state.languages.add(markdown_language()); + app_state.languages.add(markdown_lang()); gpui_tokio::init(cx); theme::init(theme::LoadThemes::JustBase, cx); @@ -4812,7 +4980,7 @@ mod tests { call::init(app_state.client.clone(), app_state.user_store.clone(), cx); notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx); workspace::init(app_state.clone(), cx); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(Version::new(0, 0, 0), cx); command_palette::init(cx); editor::init(cx); collab_ui::init(&app_state, cx); @@ -4840,6 +5008,7 @@ mod tests { false, cx, ); + agent_ui_v2::agents_panel::init(cx); repl::init(app_state.fs.clone(), cx); repl::notebook::init(cx); tasks_ui::init(cx); @@ -4854,34 +5023,6 @@ mod tests { }) } - fn rust_lang() -> Arc { - Arc::new(language::Language::new( - language::LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - )) - } - - fn markdown_language() -> Arc { - Arc::new(language::Language::new( - language::LanguageConfig { - name: "Markdown".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["md".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_md::LANGUAGE.into()), - )) - } - #[track_caller] fn assert_key_bindings_for( window: AnyWindowHandle, @@ -5000,7 +5141,7 @@ mod tests { .update(cx, |workspace, window, cx| { // Call the exact function that contains the bug eprintln!("About to call open_project_settings_file"); - open_project_settings_file(workspace, &OpenProjectSettings, window, cx); + open_project_settings_file(workspace, &OpenProjectSettingsFile, window, cx); }) .unwrap(); @@ -5029,6 +5170,28 @@ mod tests { ); } + #[gpui::test] + async fn test_disable_ai_crash(cx: &mut gpui::TestAppContext) { + let app_state = 
init_test(cx); + cx.update(init); + let project = Project::test(app_state.fs.clone(), [], cx).await; + let _window = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); + + cx.run_until_parked(); + + cx.update(|cx| { + SettingsStore::update_global(cx, |settings_store, cx| { + settings_store.update_user_settings(cx, |settings| { + settings.disable_ai = Some(SaturatingBool(true)); + }); + }); + }); + + cx.run_until_parked(); + + // If this panics, the test has failed + } + #[gpui::test] async fn test_prefer_focused_window(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index b86889f60acb5f738c93012335ef27b091edc0e2..a7961ac6d4cb663353af1e4e0d1fe66cf43a80a3 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -39,7 +39,7 @@ pub fn app_menus(cx: &mut App) -> Vec
<Menu>
{ ], }), MenuItem::separator(), - MenuItem::action("Project Panel", project_panel::ToggleFocus), + MenuItem::action("Project Panel", zed_actions::project_panel::ToggleFocus), MenuItem::action("Outline Panel", outline_panel::ToggleFocus), MenuItem::action("Collab Panel", collab_panel::ToggleFocus), MenuItem::action("Terminal Panel", terminal_panel::ToggleFocus), @@ -69,7 +69,11 @@ pub fn app_menus(cx: &mut App) -> Vec { items: vec![ MenuItem::action("Open Settings", zed_actions::OpenSettings), MenuItem::action("Open Settings File", super::OpenSettingsFile), - MenuItem::action("Open Project Settings", super::OpenProjectSettings), + MenuItem::action("Open Project Settings", zed_actions::OpenProjectSettings), + MenuItem::action( + "Open Project Settings File", + super::OpenProjectSettingsFile, + ), MenuItem::action("Open Default Settings", super::OpenDefaultSettings), MenuItem::separator(), MenuItem::action("Open Keymap", zed_actions::OpenKeymap), @@ -165,7 +169,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::os_action("Paste", editor::actions::Paste, OsAction::Paste), MenuItem::separator(), MenuItem::action("Find", search::buffer_search::Deploy::find()), - MenuItem::action("Find In Project", workspace::DeploySearch::find()), + MenuItem::action("Find in Project", workspace::DeploySearch::find()), MenuItem::separator(), MenuItem::action( "Toggle Line Comment", @@ -243,7 +247,10 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Go to Definition", editor::actions::GoToDefinition), MenuItem::action("Go to Declaration", editor::actions::GoToDeclaration), MenuItem::action("Go to Type Definition", editor::actions::GoToTypeDefinition), - MenuItem::action("Find All References", editor::actions::FindAllReferences), + MenuItem::action( + "Find All References", + editor::actions::FindAllReferences::default(), + ), MenuItem::separator(), MenuItem::action("Next Problem", editor::actions::GoToDiagnostic::default()), MenuItem::action( @@ -273,7 +280,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::separator(), MenuItem::action("Toggle Breakpoint", editor::actions::ToggleBreakpoint), MenuItem::action("Edit Breakpoint", editor::actions::EditLogBreakpoint), - MenuItem::action("Clear all Breakpoints", debugger_ui::ClearAllBreakpoints), + MenuItem::action("Clear All Breakpoints", debugger_ui::ClearAllBreakpoints), ], }, Menu { diff --git a/crates/zed/src/zed/component_preview.rs b/crates/zed/src/zed/component_preview.rs index d62f39ef6306593eba4b5fe6bff427db036e82dc..e3c7fc8df542448d5b8b290e96405546be7b4b1e 100644 --- a/crates/zed/src/zed/component_preview.rs +++ b/crates/zed/src/zed/component_preview.rs @@ -161,7 +161,7 @@ impl ComponentPreview { component_preview.update_component_list(cx); let focus_handle = component_preview.filter_editor.read(cx).focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); Ok(component_preview) } @@ -627,7 +627,7 @@ impl Render for ComponentPreview { .collect() }), ) - .track_scroll(self.nav_scroll_handle.clone()) + .track_scroll(&self.nav_scroll_handle) .p_2p5() .w(px(231.)) // Matches perfectly with the size of the "Component Preview" tab, if that's the first one in the pane .h_full() @@ -653,10 +653,8 @@ impl Render for ComponentPreview { ) .child( v_flex() - .id("content-area") .flex_1() .size_full() - .overflow_y_scroll() .child( div() .p_2() @@ -665,14 +663,18 @@ impl Render for ComponentPreview { .border_color(cx.theme().colors().border) .child(self.filter_editor.clone()), ) - .child(match active_page { - 
PreviewPage::AllComponents => { - self.render_all_components(cx).into_any_element() - } - PreviewPage::Component(id) => self - .render_component_page(&id, window, cx) - .into_any_element(), - }), + .child( + div().id("content-area").flex_1().overflow_y_scroll().child( + match active_page { + PreviewPage::AllComponents => { + self.render_all_components(cx).into_any_element() + } + PreviewPage::Component(id) => self + .render_component_page(&id, window, cx) + .into_any_element(), + }, + ), + ), ) } } @@ -768,7 +770,7 @@ impl Item for ComponentPreview { self.workspace_id = workspace.database_id(); let focus_handle = self.filter_editor.read(cx).focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); } } @@ -934,15 +936,16 @@ impl ComponentPreviewPage { fn render_header(&self, _: &Window, cx: &App) -> impl IntoElement { v_flex() - .py_12() - .px_16() + .min_w_0() + .w_full() + .p_12() .gap_6() .bg(cx.theme().colors().surface_background) .border_b_1() .border_color(cx.theme().colors().border) .child( v_flex() - .gap_0p5() + .gap_1() .child( Label::new(self.component.scope().to_string()) .size(LabelSize::Small) @@ -959,7 +962,7 @@ impl ComponentPreviewPage { ), ) .when_some(self.component.description(), |this, description| { - this.child(div().text_sm().child(description)) + this.child(Label::new(description).size(LabelSize::Small)) }) } diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index fd16478b5a7ade4b8ef86924d2ce737cb2f62c56..51327bfc9ab715a1b11aa3c639ffd60b6b0a0ea8 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -1,18 +1,21 @@ use client::{Client, UserStore}; -use codestral::CodestralCompletionProvider; +use codestral::CodestralEditPredictionDelegate; use collections::HashMap; -use copilot::{Copilot, CopilotCompletionProvider}; +use copilot::{Copilot, CopilotEditPredictionDelegate}; +use edit_prediction::{SweepFeatureFlag, ZedEditPredictionDelegate, Zeta2FeatureFlag}; use editor::Editor; use feature_flags::FeatureFlagAppExt; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; use language_models::MistralLanguageModelProvider; -use settings::SettingsStore; +use settings::{ + EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME, + EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME, + EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, SettingsStore, +}; use std::{cell::RefCell, rc::Rc, sync::Arc}; -use supermaven::{Supermaven, SupermavenCompletionProvider}; +use supermaven::{Supermaven, SupermavenEditPredictionDelegate}; use ui::Window; -use zeta::ZetaEditPredictionProvider; -use zeta2::Zeta2FeatureFlag; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let editors: Rc, AnyWindowHandle>>> = Rc::default(); @@ -57,7 +60,7 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { }) .detach(); - cx.on_action(clear_zeta_edit_history); + cx.on_action(clear_edit_prediction_store_edit_history); let mut provider = all_language_settings(None, cx).edit_predictions.provider; cx.subscribe(&user_store, { @@ -98,11 +101,9 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { .detach(); } -fn clear_zeta_edit_history(_: &zeta::ClearHistory, cx: &mut App) { - if let Some(zeta) = zeta::Zeta::global(cx) { - zeta.update(cx, |zeta, _| zeta.clear_history()); - } else if let Some(zeta) = zeta2::Zeta::try_global(cx) { - 
zeta.update(cx, |zeta, _| zeta.clear_history()); +fn clear_edit_prediction_store_edit_history(_: &edit_prediction::ClearHistory, cx: &mut App) { + if let Some(ep_store) = edit_prediction::EditPredictionStore::try_global(cx) { + ep_store.update(cx, |ep_store, _| ep_store.clear_history()); } } @@ -144,23 +145,6 @@ fn register_backward_compatible_actions(editor: &mut Editor, cx: &mut Context| { - editor.next_edit_prediction(&Default::default(), window, cx); - }, - )) - .detach(); - editor - .register_action(cx.listener( - |editor, - _: &copilot::PreviousSuggestion, - window: &mut Window, - cx: &mut Context| { - editor.previous_edit_prediction(&Default::default(), window, cx); - }, - )) - .detach(); } fn assign_edit_prediction_provider( @@ -176,7 +160,7 @@ fn assign_edit_prediction_provider( match provider { EditPredictionProvider::None => { - editor.set_edit_prediction_provider::(None, window, cx); + editor.set_edit_prediction_provider::(None, window, cx); } EditPredictionProvider::Copilot => { if let Some(copilot) = Copilot::global(cx) { @@ -187,79 +171,67 @@ fn assign_edit_prediction_provider( copilot.register_buffer(&buffer, cx); }); } - let provider = cx.new(|_| CopilotCompletionProvider::new(copilot)); + let provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); editor.set_edit_prediction_provider(Some(provider), window, cx); } } EditPredictionProvider::Supermaven => { if let Some(supermaven) = Supermaven::global(cx) { - let provider = cx.new(|_| SupermavenCompletionProvider::new(supermaven)); + let provider = cx.new(|_| SupermavenEditPredictionDelegate::new(supermaven)); editor.set_edit_prediction_provider(Some(provider), window, cx); } } EditPredictionProvider::Codestral => { let http_client = client.http_client(); - let provider = cx.new(|_| CodestralCompletionProvider::new(http_client)); + let provider = cx.new(|_| CodestralEditPredictionDelegate::new(http_client)); editor.set_edit_prediction_provider(Some(provider), window, cx); } - EditPredictionProvider::Zed => { - if user_store.read(cx).current_user().is_some() { - let mut worktree = None; - - if let Some(buffer) = &singleton_buffer - && let Some(file) = buffer.read(cx).file() - { - let id = file.worktree_id(cx); - if let Some(inner_worktree) = editor - .project() - .and_then(|project| project.read(cx).worktree_for_id(id, cx)) - { - worktree = Some(inner_worktree); - } - } - - if let Some(project) = editor.project() { - if cx.has_flag::() { - let zeta = zeta2::Zeta::global(client, &user_store, cx); - let provider = cx.new(|cx| { - zeta2::ZetaEditPredictionProvider::new( - project.clone(), - &client, - &user_store, - cx, - ) - }); - - // TODO [zeta2] handle multibuffers - if let Some(buffer) = &singleton_buffer - && buffer.read(cx).file().is_some() + value @ (EditPredictionProvider::Experimental(_) | EditPredictionProvider::Zed) => { + let ep_store = edit_prediction::EditPredictionStore::global(client, &user_store, cx); + + if let Some(project) = editor.project() + && let Some(buffer) = &singleton_buffer + && buffer.read(cx).file().is_some() + { + let has_model = ep_store.update(cx, |ep_store, cx| { + let model = if let EditPredictionProvider::Experimental(name) = value { + if name == EXPERIMENTAL_SWEEP_EDIT_PREDICTION_PROVIDER_NAME + && cx.has_flag::() { - zeta.update(cx, |zeta, cx| { - zeta.register_buffer(buffer, project, cx); - }); - } - - editor.set_edit_prediction_provider(Some(provider), window, cx); - } else { - let zeta = zeta::Zeta::register(worktree, client.clone(), user_store, cx); - - if let Some(buffer) 
= &singleton_buffer - && buffer.read(cx).file().is_some() + edit_prediction::EditPredictionModel::Sweep + } else if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME + && cx.has_flag::() + { + edit_prediction::EditPredictionModel::Zeta2 + } else if name == EXPERIMENTAL_MERCURY_EDIT_PREDICTION_PROVIDER_NAME + && cx.has_flag::() { - zeta.update(cx, |zeta, cx| { - zeta.register_buffer(buffer, project, cx); - }); + edit_prediction::EditPredictionModel::Mercury + } else { + return false; } - - let provider = cx.new(|_| { - zeta::ZetaEditPredictionProvider::new( - zeta, - project.clone(), - singleton_buffer, - ) - }); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } + } else if user_store.read(cx).current_user().is_some() { + edit_prediction::EditPredictionModel::Zeta1 + } else { + return false; + }; + + ep_store.set_edit_prediction_model(model); + ep_store.register_buffer(buffer, project, cx); + true + }); + + if has_model { + let provider = cx.new(|cx| { + ZedEditPredictionDelegate::new( + project.clone(), + singleton_buffer, + &client, + &user_store, + cx, + ) + }); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } } diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 2a9ca48c09cfb7a6abf3ff1302f905e6b4e35abe..abe18ff85e69f29c8c13dd6d860c19fb9e721105 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -3,13 +3,14 @@ use crate::restorable_workspace_locations; use anyhow::{Context as _, Result, anyhow}; use cli::{CliRequest, CliResponse, ipc::IpcSender}; use cli::{IpcHandshake, ipc}; -use client::parse_zed_link; +use client::{ZedLink, parse_zed_link}; use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use editor::Editor; use fs::Fs; use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; use futures::channel::{mpsc, oneshot}; +use futures::future; use futures::future::join_all; use futures::{FutureExt, SinkExt, StreamExt}; use git_ui::file_diff_view::FileDiffView; @@ -54,12 +55,15 @@ pub enum OpenRequestKind { schema_path: String, }, Setting { - // None just opens settings without navigating to a specific path + /// `None` opens settings without navigating to a specific path. setting_path: Option, }, GitClone { repo_url: String, }, + GitCommit { + sha: String, + }, } impl OpenRequest { @@ -118,10 +122,22 @@ impl OpenRequest { .unwrap_or_else(|_| repo_url.into()) .to_string(), }); + } else if let Some(commit_path) = url.strip_prefix("zed://git/commit/") { + this.parse_git_commit_url(commit_path)? } else if url.starts_with("ssh://") { this.parse_ssh_file_path(&url, cx)? 
- } else if let Some(request_path) = parse_zed_link(&url, cx) { - this.parse_request_path(request_path).log_err(); + } else if let Some(zed_link) = parse_zed_link(&url, cx) { + match zed_link { + ZedLink::Channel { channel_id } => { + this.join_channel = Some(channel_id); + } + ZedLink::ChannelNotes { + channel_id, + heading, + } => { + this.open_channel_notes.push((channel_id, heading)); + } + } } else { log::error!("unhandled url: {}", url); } @@ -136,6 +152,28 @@ impl OpenRequest { } } + fn parse_git_commit_url(&mut self, commit_path: &str) -> Result<()> { + // Format: ?repo= + let (sha, query) = commit_path + .split_once('?') + .context("invalid git commit url: missing query string")?; + anyhow::ensure!(!sha.is_empty(), "invalid git commit url: missing sha"); + + let repo = url::form_urlencoded::parse(query.as_bytes()) + .find_map(|(key, value)| (key == "repo").then_some(value)) + .filter(|s| !s.is_empty()) + .context("invalid git commit url: missing repo query parameter")? + .to_string(); + + self.open_paths.push(repo); + + self.kind = Some(OpenRequestKind::GitCommit { + sha: sha.to_string(), + }); + + Ok(()) + } + fn parse_ssh_file_path(&mut self, file: &str, cx: &App) -> Result<()> { let url = url::Url::parse(file)?; let host = url @@ -165,31 +203,6 @@ impl OpenRequest { self.parse_file_path(url.path()); Ok(()) } - - fn parse_request_path(&mut self, request_path: &str) -> Result<()> { - let mut parts = request_path.split('/'); - if parts.next() == Some("channel") - && let Some(slug) = parts.next() - && let Some(id_str) = slug.split('-').next_back() - && let Ok(channel_id) = id_str.parse::() - { - let Some(next) = parts.next() else { - self.join_channel = Some(channel_id); - return Ok(()); - }; - - if let Some(heading) = next.strip_prefix("notes#") { - self.open_channel_notes - .push((channel_id, Some(heading.to_string()))); - return Ok(()); - } - if next == "notes" { - self.open_channel_notes.push((channel_id, None)); - return Ok(()); - } - } - anyhow::bail!("invalid zed url: {request_path}") - } } #[derive(Clone)] @@ -523,33 +536,27 @@ async fn open_local_workspace( app_state: &Arc, cx: &mut AsyncApp, ) -> bool { - let mut errored = false; - let paths_with_position = derive_paths_with_position(app_state.fs.as_ref(), workspace_paths).await; - // Handle reuse flag by finding existing window to replace - let replace_window = if reuse { - cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()) - .ok() - .flatten() - } else { - None - }; - - // For reuse, force new workspace creation but with replace_window set - let effective_open_new_workspace = if reuse { - Some(true) + // If reuse flag is passed, open a new workspace in an existing window. 
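For reference, the link shape handled by `parse_git_commit_url` above is `zed://git/commit/<sha>?repo=<path>`, as reflected by its error messages and by `test_parse_git_commit_url` further down. A minimal standalone sketch of the same query handling, using the `url` crate just as the patch does; the function name and the `main` below are assumptions made for the example:

```rust
/// Illustrative stand-in for the query handling in `parse_git_commit_url`:
/// returns the commit SHA and repository path, or `None` if either is missing.
fn split_commit_link(commit_path: &str) -> Option<(String, String)> {
    // `commit_path` is everything after "zed://git/commit/", e.g. "abc123?repo=path/to/repo".
    let (sha, query) = commit_path.split_once('?')?;
    if sha.is_empty() {
        return None;
    }
    let repo = url::form_urlencoded::parse(query.as_bytes())
        .find_map(|(key, value)| (key == "repo").then_some(value.into_owned()))
        .filter(|repo| !repo.is_empty())?;
    Some((sha.to_string(), repo))
}

fn main() {
    assert_eq!(
        split_commit_link("abc123?repo=path%20with%20spaces"),
        Some(("abc123".to_string(), "path with spaces".to_string()))
    );
    assert_eq!(split_commit_link("abc123?foo=bar"), None);
}
```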
+ let (open_new_workspace, replace_window) = if reuse { + ( + Some(true), + cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()) + .ok() + .flatten(), + ) } else { - open_new_workspace + (open_new_workspace, None) }; - match open_paths_with_positions( + let (workspace, items) = match open_paths_with_positions( &paths_with_position, &diff_paths, app_state.clone(), workspace::OpenOptions { - open_new_workspace: effective_open_new_workspace, + open_new_workspace, replace_window, prefer_focused_window: wait, env: env.cloned(), @@ -559,80 +566,95 @@ async fn open_local_workspace( ) .await { - Ok((workspace, items)) => { - let mut item_release_futures = Vec::new(); + Ok(result) => result, + Err(error) => { + responses + .send(CliResponse::Stderr { + message: format!("error opening {paths_with_position:?}: {error}"), + }) + .log_err(); + return true; + } + }; - for item in items { - match item { - Some(Ok(item)) => { - cx.update(|cx| { - let released = oneshot::channel(); - item.on_release( - cx, - Box::new(move |_| { - let _ = released.0.send(()); - }), - ) - .detach(); - item_release_futures.push(released.1); - }) - .log_err(); - } - Some(Err(err)) => { - responses - .send(CliResponse::Stderr { - message: err.to_string(), - }) - .log_err(); - errored = true; - } - None => {} - } + let mut errored = false; + let mut item_release_futures = Vec::new(); + let mut subscriptions = Vec::new(); + + // If --wait flag is used with no paths, or a directory, then wait until + // the entire workspace is closed. + if wait { + let mut wait_for_window_close = paths_with_position.is_empty() && diff_paths.is_empty(); + for path_with_position in &paths_with_position { + if app_state.fs.is_dir(&path_with_position.path).await { + wait_for_window_close = true; + break; } + } + + if wait_for_window_close { + let (release_tx, release_rx) = oneshot::channel(); + item_release_futures.push(release_rx); + subscriptions.push(workspace.update(cx, |_, _, cx| { + cx.on_release(move |_, _| { + let _ = release_tx.send(()); + }) + })); + } + } - if wait { - let background = cx.background_executor().clone(); - let wait = async move { - if paths_with_position.is_empty() && diff_paths.is_empty() { - let (done_tx, done_rx) = oneshot::channel(); - let _subscription = workspace.update(cx, |_, _, cx| { - cx.on_release(move |_, _| { - let _ = done_tx.send(()); - }) - }); - let _ = done_rx.await; - } else { - let _ = futures::future::try_join_all(item_release_futures).await; - }; + for item in items { + match item { + Some(Ok(item)) => { + if wait { + let (release_tx, release_rx) = oneshot::channel(); + item_release_futures.push(release_rx); + subscriptions.push(cx.update(|cx| { + item.on_release( + cx, + Box::new(move |_| { + release_tx.send(()).ok(); + }), + ) + })); } - .fuse(); - - futures::pin_mut!(wait); - - loop { - // Repeatedly check if CLI is still open to avoid wasting resources - // waiting for files or workspaces to close. - let mut timer = background.timer(Duration::from_secs(1)).fuse(); - futures::select_biased! 
{ - _ = wait => break, - _ = timer => { - if responses.send(CliResponse::Ping).is_err() { - break; - } - } + } + Some(Err(err)) => { + responses + .send(CliResponse::Stderr { + message: err.to_string(), + }) + .log_err(); + errored = true; + } + None => {} + } + } + + if wait { + let wait = async move { + let _subscriptions = subscriptions; + let _ = future::try_join_all(item_release_futures).await; + } + .fuse(); + futures::pin_mut!(wait); + + let background = cx.background_executor().clone(); + loop { + // Repeatedly check if CLI is still open to avoid wasting resources + // waiting for files or workspaces to close. + let mut timer = background.timer(Duration::from_secs(1)).fuse(); + futures::select_biased! { + _ = wait => break, + _ = timer => { + if responses.send(CliResponse::Ping).is_err() { + break; } } } } - Err(error) => { - errored = true; - responses - .send(CliResponse::Stderr { - message: format!("error opening {paths_with_position:?}: {error}"), - }) - .log_err(); - } } + errored } @@ -662,12 +684,13 @@ mod tests { ipc::{self}, }; use editor::Editor; - use gpui::TestAppContext; + use futures::poll; + use gpui::{AppContext as _, TestAppContext}; use language::LineEnding; use remote::SshConnectionOptions; use rope::Rope; use serde_json::json; - use std::sync::Arc; + use std::{sync::Arc, task::Poll}; use util::path; use workspace::{AppState, Workspace}; @@ -695,11 +718,92 @@ mod tests { port_forwards: None, nickname: None, upload_binary_over_ssh: false, + connection_timeout: None, }) ); assert_eq!(request.open_paths, vec!["/"]); } + #[gpui::test] + fn test_parse_git_commit_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + // Test basic git commit URL + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://git/commit/abc123?repo=path/to/repo".into()], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind.unwrap() { + OpenRequestKind::GitCommit { sha } => { + assert_eq!(sha, "abc123"); + } + _ => panic!("expected GitCommit variant"), + } + // Verify path was added to open_paths for workspace routing + assert_eq!(request.open_paths, vec!["path/to/repo"]); + + // Test with URL encoded path + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://git/commit/def456?repo=path%20with%20spaces".into()], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind.unwrap() { + OpenRequestKind::GitCommit { sha } => { + assert_eq!(sha, "def456"); + } + _ => panic!("expected GitCommit variant"), + } + assert_eq!(request.open_paths, vec!["path with spaces"]); + + // Test with empty path + cx.update(|cx| { + assert!( + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://git/commit/abc123?repo=".into()], + ..Default::default() + }, + cx, + ) + .unwrap_err() + .to_string() + .contains("missing repo") + ); + }); + + // Test error case: missing SHA + let result = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://git/commit/abc123?foo=bar".into()], + ..Default::default() + }, + cx, + ) + }); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("missing repo query parameter") + ); + } + #[gpui::test] async fn test_open_workspace_with_directory(cx: &mut TestAppContext) { let app_state = init_test(cx); @@ -762,6 +866,60 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_wait_with_directory_waits_for_window_close(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + 
.fs + .as_fake() + .insert_tree( + path!("/root"), + json!({ + "dir1": { + "file1.txt": "content1", + }, + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let workspace_paths = vec![path!("/root/dir1").to_owned()]; + + let (done_tx, mut done_rx) = futures::channel::oneshot::channel(); + cx.spawn({ + let app_state = app_state.clone(); + move |mut cx| async move { + let errored = open_local_workspace( + workspace_paths, + vec![], + None, + false, + true, + &response_tx, + None, + &app_state, + &mut cx, + ) + .await; + let _ = done_tx.send(errored); + } + }) + .detach(); + + cx.background_executor.run_until_parked(); + assert_eq!(cx.windows().len(), 1); + assert!(matches!(poll!(&mut done_rx), Poll::Pending)); + + let window = cx.windows()[0]; + cx.update_window(window, |_, window, _| window.remove_window()) + .unwrap(); + cx.background_executor.run_until_parked(); + + let errored = done_rx.await.unwrap(); + assert!(!errored); + } + #[gpui::test] async fn test_open_workspace_with_nonexistent_files(cx: &mut TestAppContext) { let app_state = init_test(cx); diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index 402881680232ea636f7cb105db759f417a435145..2a52cc697249cb1f8eb280a48c89ff5aadf6fd85 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -174,17 +174,13 @@ impl Render for QuickActionBar { .as_ref() .is_some_and(|menu| matches!(menu.origin(), ContextMenuOrigin::QuickActionBar)) }; - let code_action_element = if is_deployed { - editor.update(cx, |editor, cx| { - if let Some(style) = editor.style() { - editor.render_context_menu(style, MAX_CODE_ACTION_MENU_LINES, window, cx) - } else { - None - } + let code_action_element = is_deployed + .then(|| { + editor.update(cx, |editor, cx| { + editor.render_context_menu(MAX_CODE_ACTION_MENU_LINES, window, cx) + }) }) - } else { - None - }; + .flatten(); v_flex() .child( IconButton::new("toggle_code_actions_icon", IconName::BoltOutlined) diff --git a/crates/zed/src/zed/quick_action_bar/preview.rs b/crates/zed/src/zed/quick_action_bar/preview.rs index 630d243cf6971ecebda694091acbfd5ba4c049e4..5d43e79542357977b06fbbd884472f94ad3595c8 100644 --- a/crates/zed/src/zed/quick_action_bar/preview.rs +++ b/crates/zed/src/zed/quick_action_bar/preview.rs @@ -32,7 +32,7 @@ impl QuickActionBar { .is_some() { preview_type = Some(PreviewType::Markdown); - } else if SvgPreviewView::resolve_active_item_as_svg_editor(workspace, cx).is_some() + } else if SvgPreviewView::resolve_active_item_as_svg_buffer(workspace, cx).is_some() { preview_type = Some(PreviewType::Svg); } diff --git a/crates/zed/src/zed/quick_action_bar/repl_menu.rs b/crates/zed/src/zed/quick_action_bar/repl_menu.rs index 5210bb718c0663d2c256f865f0fcabf41bd5708f..1ebdf35bb93824b7881afabe289a07feb93f8135 100644 --- a/crates/zed/src/zed/quick_action_bar/repl_menu.rs +++ b/crates/zed/src/zed/quick_action_bar/repl_menu.rs @@ -388,16 +388,55 @@ fn session_state(session: Entity, cx: &mut App) -> ReplMenuState { } }; - match &session.kernel { - Kernel::Restarting => ReplMenuState { - tooltip: format!("Restarting {}", kernel_name).into(), - icon_is_animating: true, - popover_disabled: true, + let transitional = + |tooltip: SharedString, animating: bool, popover_disabled: bool| ReplMenuState { + tooltip, + icon_is_animating: animating, + popover_disabled, icon_color: Color::Muted, indicator: Some(Indicator::dot().color(Color::Muted)), status: session.kernel.status(), ..fill_fields() - }, + }; + + let 
starting = || transitional(format!("{} is starting", kernel_name).into(), true, true); + let restarting = || transitional(format!("Restarting {}", kernel_name).into(), true, true); + let shutting_down = || { + transitional( + format!("{} is shutting down", kernel_name).into(), + false, + true, + ) + }; + let auto_restarting = || { + transitional( + format!("Auto-restarting {}", kernel_name).into(), + true, + true, + ) + }; + let unknown = || transitional(format!("{} state unknown", kernel_name).into(), false, true); + let other = |state: &str| { + transitional( + format!("{} state: {}", kernel_name, state).into(), + false, + true, + ) + }; + + let shutdown = || ReplMenuState { + tooltip: "Nothing running".into(), + icon: IconName::ReplNeutral, + icon_color: Color::Default, + icon_is_animating: false, + popover_disabled: false, + indicator: None, + status: KernelStatus::Shutdown, + ..fill_fields() + }; + + match &session.kernel { + Kernel::Restarting => restarting(), Kernel::RunningKernel(kernel) => match &kernel.execution_state() { ExecutionState::Idle => ReplMenuState { tooltip: format!("Run code on {} ({})", kernel_name, kernel_language).into(), @@ -413,16 +452,15 @@ fn session_state(session: Entity, cx: &mut App) -> ReplMenuState { status: session.kernel.status(), ..fill_fields() }, + ExecutionState::Unknown => unknown(), + ExecutionState::Starting => starting(), + ExecutionState::Restarting => restarting(), + ExecutionState::Terminating => shutting_down(), + ExecutionState::AutoRestarting => auto_restarting(), + ExecutionState::Dead => shutdown(), + ExecutionState::Other(state) => other(state), }, - Kernel::StartingKernel(_) => ReplMenuState { - tooltip: format!("{} is starting", kernel_name).into(), - icon_is_animating: true, - popover_disabled: true, - icon_color: Color::Muted, - indicator: Some(Indicator::dot().color(Color::Muted)), - status: session.kernel.status(), - ..fill_fields() - }, + Kernel::StartingKernel(_) => starting(), Kernel::ErroredLaunch(e) => ReplMenuState { tooltip: format!("Error with kernel {}: {}", kernel_name, e).into(), popover_disabled: false, @@ -430,23 +468,7 @@ fn session_state(session: Entity, cx: &mut App) -> ReplMenuState { status: session.kernel.status(), ..fill_fields() }, - Kernel::ShuttingDown => ReplMenuState { - tooltip: format!("{} is shutting down", kernel_name).into(), - popover_disabled: true, - icon_color: Color::Muted, - indicator: Some(Indicator::dot().color(Color::Muted)), - status: session.kernel.status(), - ..fill_fields() - }, - Kernel::Shutdown => ReplMenuState { - tooltip: "Nothing running".into(), - icon: IconName::ReplNeutral, - icon_color: Color::Default, - icon_is_animating: false, - popover_disabled: false, - indicator: None, - status: KernelStatus::Shutdown, - ..fill_fields() - }, + Kernel::ShuttingDown => shutting_down(), + Kernel::Shutdown => shutdown(), } } diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index c0739c74c7ac6c103e34c7a2cd730096503ef565..458ca10ecdf8915eef3ee69c6334b1a14cc0c219 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -43,6 +43,9 @@ actions!( /// Opens the settings JSON file. #[action(deprecated_aliases = ["zed_actions::OpenSettings"])] OpenSettingsFile, + /// Opens project-specific settings. + #[action(deprecated_aliases = ["zed_actions::OpenProjectSettings"])] + OpenProjectSettings, /// Opens the default keymap file. OpenDefaultKeymap, /// Opens the user keymap file. @@ -65,6 +68,10 @@ actions!( OpenLicenses, /// Opens the telemetry log. 
OpenTelemetryLog, + /// Opens the performance profiler. + OpenPerformanceProfiler, + /// Opens the onboarding view. + OpenOnboarding, ] ); @@ -210,6 +217,10 @@ pub mod git { Switch, /// Selects a different repository. SelectRepo, + /// Filter remotes. + FilterRemotes, + /// Create a git remote. + CreateRemote, /// Opens the git branch selector. #[action(deprecated_aliases = ["branches::OpenRecent"])] Branch, @@ -245,6 +256,17 @@ pub mod command_palette { ); } +pub mod project_panel { + use gpui::actions; + + actions!( + project_panel, + [ + /// Toggles focus on the project panel. + ToggleFocus + ] + ); +} pub mod feedback { use gpui::actions; @@ -330,6 +352,8 @@ pub mod agent { AddSelectionToThread, /// Resets the agent panel zoom levels (agent UI and buffer font sizes). ResetAgentZoom, + /// Toggles the utility/agent pane open/closed state. + ToggleAgentPane, ] ); } @@ -408,6 +432,12 @@ pub struct OpenRemote { pub create_new_window: bool, } +/// Opens the dev container connection modal. +#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)] +#[action(namespace = projects)] +#[serde(deny_unknown_fields)] +pub struct OpenDevContainer; + /// Where to spawn the task in the UI. #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -527,6 +557,18 @@ actions!( ] ); +pub mod vim { + use gpui::actions; + + actions!( + vim, + [ + /// Opens the default keymap file. + OpenDefaultKeymap + ] + ); +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WslConnectionOptions { pub distro_name: String, diff --git a/crates/zed_env_vars/src/zed_env_vars.rs b/crates/zed_env_vars/src/zed_env_vars.rs index 53b9c22bb207e81831d1d9ae6087d1a297331d3f..e601cc9536602ac943bd76bf1bfd8b8ac8979dd9 100644 --- a/crates/zed_env_vars/src/zed_env_vars.rs +++ b/crates/zed_env_vars/src/zed_env_vars.rs @@ -5,6 +5,7 @@ use std::sync::LazyLock; /// When true, Zed will use in-memory databases instead of persistent storage. pub static ZED_STATELESS: LazyLock = bool_env_var!("ZED_STATELESS"); +#[derive(Clone)] pub struct EnvVar { pub name: SharedString, /// Value of the environment variable. Also `None` when set to an empty string. @@ -30,7 +31,7 @@ impl EnvVar { #[macro_export] macro_rules! env_var { ($name:expr) => { - LazyLock::new(|| $crate::EnvVar::new(($name).into())) + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) }; } @@ -39,6 +40,6 @@ macro_rules! env_var { #[macro_export] macro_rules! 
bool_env_var { ($name:expr) => { - LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) }; } diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml deleted file mode 100644 index 821d3e0b9e7a5ff37302cf613f4e09b047f121f1..0000000000000000000000000000000000000000 --- a/crates/zeta/Cargo.toml +++ /dev/null @@ -1,83 +0,0 @@ -[package] -name = "zeta" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" -exclude = ["fixtures"] - -[lints] -workspace = true - -[lib] -path = "src/zeta.rs" -doctest = false - -[features] -test-support = [] - -[dependencies] -ai_onboarding.workspace = true -anyhow.workspace = true -arrayvec.workspace = true -client.workspace = true -cloud_llm_client.workspace = true -collections.workspace = true -command_palette_hooks.workspace = true -copilot.workspace = true -db.workspace = true -edit_prediction.workspace = true -editor.workspace = true -feature_flags.workspace = true -fs.workspace = true -futures.workspace = true -gpui.workspace = true -http_client.workspace = true -indoc.workspace = true -itertools.workspace = true -language.workspace = true -language_model.workspace = true -log.workspace = true -menu.workspace = true -postage.workspace = true -project.workspace = true -rand.workspace = true -regex.workspace = true -release_channel.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -strum.workspace = true -telemetry.workspace = true -telemetry_events.workspace = true -theme.workspace = true -thiserror.workspace = true -ui.workspace = true -util.workspace = true -uuid.workspace = true -workspace.workspace = true -worktree.workspace = true -zed_actions.workspace = true - -[dev-dependencies] -call = { workspace = true, features = ["test-support"] } -client = { workspace = true, features = ["test-support"] } -clock = { workspace = true, features = ["test-support"] } -cloud_api_types.workspace = true -collections = { workspace = true, features = ["test-support"] } -ctor.workspace = true -editor = { workspace = true, features = ["test-support"] } -gpui = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } -indoc.workspace = true -language = { workspace = true, features = ["test-support"] } -parking_lot.workspace = true -reqwest_client = { workspace = true, features = ["test-support"] } -rpc = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } -theme = { workspace = true, features = ["test-support"] } -tree-sitter-go.workspace = true -tree-sitter-rust.workspace = true -workspace = { workspace = true, features = ["test-support"] } -worktree = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/zeta/src/completion_diff_element.rs b/crates/zeta/src/completion_diff_element.rs deleted file mode 100644 index 73c3cb20cd7de5da92fbf6e5a32a8ca8d42a5933..0000000000000000000000000000000000000000 --- a/crates/zeta/src/completion_diff_element.rs +++ /dev/null @@ -1,173 +0,0 @@ -use std::cmp; - -use crate::EditPrediction; -use gpui::{ - AnyElement, App, BorderStyle, Bounds, Corners, Edges, HighlightStyle, Hsla, StyledText, - TextLayout, TextStyle, point, prelude::*, quad, size, -}; -use language::OffsetRangeExt; -use settings::Settings; -use theme::ThemeSettings; -use ui::prelude::*; - -pub struct CompletionDiffElement { - 
element: AnyElement, - text_layout: TextLayout, - cursor_offset: usize, -} - -impl CompletionDiffElement { - pub fn new(completion: &EditPrediction, cx: &App) -> Self { - let mut diff = completion - .snapshot - .text_for_range(completion.excerpt_range.clone()) - .collect::(); - - let mut cursor_offset_in_diff = None; - let mut delta = 0; - let mut diff_highlights = Vec::new(); - for (old_range, new_text) in completion.edits.iter() { - let old_range = old_range.to_offset(&completion.snapshot); - - if cursor_offset_in_diff.is_none() && completion.cursor_offset <= old_range.end { - cursor_offset_in_diff = - Some(completion.cursor_offset - completion.excerpt_range.start + delta); - } - - let old_start_in_diff = old_range.start - completion.excerpt_range.start + delta; - let old_end_in_diff = old_range.end - completion.excerpt_range.start + delta; - if old_start_in_diff < old_end_in_diff { - diff_highlights.push(( - old_start_in_diff..old_end_in_diff, - HighlightStyle { - background_color: Some(cx.theme().status().deleted_background), - strikethrough: Some(gpui::StrikethroughStyle { - thickness: px(1.), - color: Some(cx.theme().colors().text_muted), - }), - ..Default::default() - }, - )); - } - - if !new_text.is_empty() { - diff.insert_str(old_end_in_diff, new_text); - diff_highlights.push(( - old_end_in_diff..old_end_in_diff + new_text.len(), - HighlightStyle { - background_color: Some(cx.theme().status().created_background), - ..Default::default() - }, - )); - delta += new_text.len(); - } - } - - let cursor_offset_in_diff = cursor_offset_in_diff - .unwrap_or_else(|| completion.cursor_offset - completion.excerpt_range.start + delta); - - let settings = ThemeSettings::get_global(cx).clone(); - let text_style = TextStyle { - color: cx.theme().colors().editor_foreground, - font_size: settings.buffer_font_size(cx).into(), - font_family: settings.buffer_font.family, - font_features: settings.buffer_font.features, - font_fallbacks: settings.buffer_font.fallbacks, - line_height: relative(settings.buffer_line_height.value()), - font_weight: settings.buffer_font.weight, - font_style: settings.buffer_font.style, - ..Default::default() - }; - let element = StyledText::new(diff).with_default_highlights(&text_style, diff_highlights); - let text_layout = element.layout().clone(); - - CompletionDiffElement { - element: element.into_any_element(), - text_layout, - cursor_offset: cursor_offset_in_diff, - } - } -} - -impl IntoElement for CompletionDiffElement { - type Element = Self; - - fn into_element(self) -> Self { - self - } -} - -impl Element for CompletionDiffElement { - type RequestLayoutState = (); - type PrepaintState = (); - - fn id(&self) -> Option { - None - } - - fn source_location(&self) -> Option<&'static core::panic::Location<'static>> { - None - } - - fn request_layout( - &mut self, - _id: Option<&gpui::GlobalElementId>, - _inspector_id: Option<&gpui::InspectorElementId>, - window: &mut Window, - cx: &mut App, - ) -> (gpui::LayoutId, Self::RequestLayoutState) { - (self.element.request_layout(window, cx), ()) - } - - fn prepaint( - &mut self, - _id: Option<&gpui::GlobalElementId>, - _inspector_id: Option<&gpui::InspectorElementId>, - _bounds: gpui::Bounds, - _request_layout: &mut Self::RequestLayoutState, - window: &mut Window, - cx: &mut App, - ) -> Self::PrepaintState { - self.element.prepaint(window, cx); - } - - fn paint( - &mut self, - _id: Option<&gpui::GlobalElementId>, - _inspector_id: Option<&gpui::InspectorElementId>, - _bounds: gpui::Bounds, - _request_layout: &mut 
Self::RequestLayoutState, - _prepaint: &mut Self::PrepaintState, - window: &mut Window, - cx: &mut App, - ) { - if let Some(position) = self.text_layout.position_for_index(self.cursor_offset) { - let bounds = self.text_layout.bounds(); - let line_height = self.text_layout.line_height(); - let line_width = self - .text_layout - .line_layout_for_index(self.cursor_offset) - .map_or(bounds.size.width, |layout| layout.width()); - window.paint_quad(quad( - Bounds::new( - point(bounds.origin.x, position.y), - size(cmp::max(bounds.size.width, line_width), line_height), - ), - Corners::default(), - cx.theme().colors().editor_active_line_background, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - self.element.paint(window, cx); - window.paint_quad(quad( - Bounds::new(position, size(px(2.), line_height)), - Corners::default(), - cx.theme().players().local().cursor, - Edges::default(), - Hsla::transparent_black(), - BorderStyle::default(), - )); - } - } -} diff --git a/crates/zeta/src/init.rs b/crates/zeta/src/init.rs deleted file mode 100644 index 0167d878fa34976d7175a64269d9dfe29d18d8fe..0000000000000000000000000000000000000000 --- a/crates/zeta/src/init.rs +++ /dev/null @@ -1,110 +0,0 @@ -use std::any::{Any, TypeId}; - -use command_palette_hooks::CommandPaletteFilter; -use feature_flags::{FeatureFlagAppExt as _, PredictEditsRateCompletionsFeatureFlag}; -use gpui::actions; -use language::language_settings::EditPredictionProvider; -use project::DisableAiSettings; -use settings::{Settings, SettingsStore, update_settings_file}; -use ui::App; -use workspace::Workspace; - -use crate::{RateCompletionModal, onboarding_modal::ZedPredictModal}; - -actions!( - edit_prediction, - [ - /// Resets the edit prediction onboarding state. - ResetOnboarding, - /// Opens the rate completions modal. 
- RateCompletions - ] -); - -pub fn init(cx: &mut App) { - feature_gate_predict_edits_actions(cx); - - cx.observe_new(move |workspace: &mut Workspace, _, _cx| { - workspace.register_action(|workspace, _: &RateCompletions, window, cx| { - if cx.has_flag::() { - RateCompletionModal::toggle(workspace, window, cx); - } - }); - - workspace.register_action( - move |workspace, _: &zed_actions::OpenZedPredictOnboarding, window, cx| { - ZedPredictModal::toggle( - workspace, - workspace.user_store().clone(), - workspace.client().clone(), - window, - cx, - ) - }, - ); - - workspace.register_action(|workspace, _: &ResetOnboarding, _window, cx| { - update_settings_file(workspace.app_state().fs.clone(), cx, move |settings, _| { - settings - .project - .all_languages - .features - .get_or_insert_default() - .edit_prediction_provider = Some(EditPredictionProvider::None) - }); - }); - }) - .detach(); -} - -fn feature_gate_predict_edits_actions(cx: &mut App) { - let rate_completion_action_types = [TypeId::of::()]; - let reset_onboarding_action_types = [TypeId::of::()]; - let zeta_all_action_types = [ - TypeId::of::(), - TypeId::of::(), - zed_actions::OpenZedPredictOnboarding.type_id(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - TypeId::of::(), - ]; - - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.hide_action_types(&rate_completion_action_types); - filter.hide_action_types(&reset_onboarding_action_types); - filter.hide_action_types(&[zed_actions::OpenZedPredictOnboarding.type_id()]); - }); - - cx.observe_global::(move |cx| { - let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; - let has_feature_flag = cx.has_flag::(); - - CommandPaletteFilter::update_global(cx, |filter, _cx| { - if is_ai_disabled { - filter.hide_action_types(&zeta_all_action_types); - } else if has_feature_flag { - filter.show_action_types(&rate_completion_action_types); - } else { - filter.hide_action_types(&rate_completion_action_types); - } - }); - }) - .detach(); - - cx.observe_flag::(move |is_enabled, cx| { - if !DisableAiSettings::get_global(cx).disable_ai { - if is_enabled { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.show_action_types(&rate_completion_action_types); - }); - } else { - CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.hide_action_types(&rate_completion_action_types); - }); - } - } - }) - .detach(); -} diff --git a/crates/zeta/src/input_excerpt.rs b/crates/zeta/src/input_excerpt.rs deleted file mode 100644 index 06bff5b1bea0f099b2ccd98605ac5de5bb5e6360..0000000000000000000000000000000000000000 --- a/crates/zeta/src/input_excerpt.rs +++ /dev/null @@ -1,229 +0,0 @@ -use crate::{ - CURSOR_MARKER, EDITABLE_REGION_END_MARKER, EDITABLE_REGION_START_MARKER, START_OF_FILE_MARKER, - guess_token_count, -}; -use language::{BufferSnapshot, Point}; -use std::{fmt::Write, ops::Range}; - -#[derive(Debug)] -pub struct InputExcerpt { - pub editable_range: Range, - pub prompt: String, -} - -pub fn excerpt_for_cursor_position( - position: Point, - path: &str, - snapshot: &BufferSnapshot, - editable_region_token_limit: usize, - context_token_limit: usize, -) -> InputExcerpt { - let mut scope_range = position..position; - let mut remaining_edit_tokens = editable_region_token_limit; - - while let Some(parent) = snapshot.syntax_ancestor(scope_range.clone()) { - let parent_tokens = guess_token_count(parent.byte_range().len()); - let parent_point_range = Point::new( - parent.start_position().row as u32, - parent.start_position().column as 
u32, - ) - ..Point::new( - parent.end_position().row as u32, - parent.end_position().column as u32, - ); - if parent_point_range == scope_range { - break; - } else if parent_tokens <= editable_region_token_limit { - scope_range = parent_point_range; - remaining_edit_tokens = editable_region_token_limit - parent_tokens; - } else { - break; - } - } - - let editable_range = expand_range(snapshot, scope_range, remaining_edit_tokens); - let context_range = expand_range(snapshot, editable_range.clone(), context_token_limit); - - let mut prompt = String::new(); - - writeln!(&mut prompt, "```{path}").unwrap(); - if context_range.start == Point::zero() { - writeln!(&mut prompt, "{START_OF_FILE_MARKER}").unwrap(); - } - - for chunk in snapshot.chunks(context_range.start..editable_range.start, false) { - prompt.push_str(chunk.text); - } - - push_editable_range(position, snapshot, editable_range.clone(), &mut prompt); - - for chunk in snapshot.chunks(editable_range.end..context_range.end, false) { - prompt.push_str(chunk.text); - } - write!(prompt, "\n```").unwrap(); - - InputExcerpt { - editable_range, - prompt, - } -} - -fn push_editable_range( - cursor_position: Point, - snapshot: &BufferSnapshot, - editable_range: Range, - prompt: &mut String, -) { - writeln!(prompt, "{EDITABLE_REGION_START_MARKER}").unwrap(); - for chunk in snapshot.chunks(editable_range.start..cursor_position, false) { - prompt.push_str(chunk.text); - } - prompt.push_str(CURSOR_MARKER); - for chunk in snapshot.chunks(cursor_position..editable_range.end, false) { - prompt.push_str(chunk.text); - } - write!(prompt, "\n{EDITABLE_REGION_END_MARKER}").unwrap(); -} - -fn expand_range( - snapshot: &BufferSnapshot, - range: Range, - mut remaining_tokens: usize, -) -> Range { - let mut expanded_range = range; - expanded_range.start.column = 0; - expanded_range.end.column = snapshot.line_len(expanded_range.end.row); - loop { - let mut expanded = false; - - if remaining_tokens > 0 && expanded_range.start.row > 0 { - expanded_range.start.row -= 1; - let line_tokens = - guess_token_count(snapshot.line_len(expanded_range.start.row) as usize); - remaining_tokens = remaining_tokens.saturating_sub(line_tokens); - expanded = true; - } - - if remaining_tokens > 0 && expanded_range.end.row < snapshot.max_point().row { - expanded_range.end.row += 1; - expanded_range.end.column = snapshot.line_len(expanded_range.end.row); - let line_tokens = guess_token_count(expanded_range.end.column as usize); - remaining_tokens = remaining_tokens.saturating_sub(line_tokens); - expanded = true; - } - - if !expanded { - break; - } - } - expanded_range -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::{App, AppContext}; - use indoc::indoc; - use language::{Buffer, Language, LanguageConfig, LanguageMatcher}; - use std::sync::Arc; - - #[gpui::test] - fn test_excerpt_for_cursor_position(cx: &mut App) { - let text = indoc! 
{r#" - fn foo() { - let x = 42; - println!("Hello, world!"); - } - - fn bar() { - let x = 42; - let mut sum = 0; - for i in 0..x { - sum += i; - } - println!("Sum: {}", sum); - return sum; - } - - fn generate_random_numbers() -> Vec { - let mut rng = rand::thread_rng(); - let mut numbers = Vec::new(); - for _ in 0..5 { - numbers.push(rng.random_range(1..101)); - } - numbers - } - "#}; - let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); - let snapshot = buffer.read(cx).snapshot(); - - // Ensure we try to fit the largest possible syntax scope, resorting to line-based expansion - // when a larger scope doesn't fit the editable region. - let excerpt = excerpt_for_cursor_position(Point::new(12, 5), "main.rs", &snapshot, 50, 32); - assert_eq!( - excerpt.prompt, - indoc! {r#" - ```main.rs - let x = 42; - println!("Hello, world!"); - <|editable_region_start|> - } - - fn bar() { - let x = 42; - let mut sum = 0; - for i in 0..x { - sum += i; - } - println!("Sum: {}", sum); - r<|user_cursor_is_here|>eturn sum; - } - - fn generate_random_numbers() -> Vec { - <|editable_region_end|> - let mut rng = rand::thread_rng(); - let mut numbers = Vec::new(); - ```"#} - ); - - // The `bar` function won't fit within the editable region, so we resort to line-based expansion. - let excerpt = excerpt_for_cursor_position(Point::new(12, 5), "main.rs", &snapshot, 40, 32); - assert_eq!( - excerpt.prompt, - indoc! {r#" - ```main.rs - fn bar() { - let x = 42; - let mut sum = 0; - <|editable_region_start|> - for i in 0..x { - sum += i; - } - println!("Sum: {}", sum); - r<|user_cursor_is_here|>eturn sum; - } - - fn generate_random_numbers() -> Vec { - let mut rng = rand::thread_rng(); - <|editable_region_end|> - let mut numbers = Vec::new(); - for _ in 0..5 { - numbers.push(rng.random_range(1..101)); - ```"#} - ); - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - } -} diff --git a/crates/zeta/src/onboarding_telemetry.rs b/crates/zeta/src/onboarding_telemetry.rs deleted file mode 100644 index 3c7d5e1442947c3e8cea446ebf37597a3cce1f80..0000000000000000000000000000000000000000 --- a/crates/zeta/src/onboarding_telemetry.rs +++ /dev/null @@ -1,9 +0,0 @@ -#[macro_export] -macro_rules! onboarding_event { - ($name:expr) => { - telemetry::event!($name, source = "Edit Prediction Onboarding"); - }; - ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => { - telemetry::event!($name, source = "Edit Prediction Onboarding", $($key $(= $value)?),+); - }; -} diff --git a/crates/zeta/src/rate_completion_modal.rs b/crates/zeta/src/rate_completion_modal.rs deleted file mode 100644 index cc1787ab01c6dd8f6429c3ac821a485355629462..0000000000000000000000000000000000000000 --- a/crates/zeta/src/rate_completion_modal.rs +++ /dev/null @@ -1,685 +0,0 @@ -use crate::{CompletionDiffElement, EditPrediction, EditPredictionRating, Zeta}; -use editor::Editor; -use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, actions, prelude::*}; -use language::language_settings; -use std::time::Duration; -use ui::{KeyBinding, List, ListItem, ListItemSpacing, Tooltip, prelude::*}; -use workspace::{ModalView, Workspace}; - -actions!( - zeta, - [ - /// Rates the active completion with a thumbs up. - ThumbsUpActiveCompletion, - /// Rates the active completion with a thumbs down. 
- ThumbsDownActiveCompletion, - /// Navigates to the next edit in the completion history. - NextEdit, - /// Navigates to the previous edit in the completion history. - PreviousEdit, - /// Focuses on the completions list. - FocusCompletions, - /// Previews the selected completion. - PreviewCompletion, - ] -); - -pub struct RateCompletionModal { - zeta: Entity, - active_completion: Option, - selected_index: usize, - focus_handle: FocusHandle, - _subscription: gpui::Subscription, - current_view: RateCompletionView, -} - -struct ActiveCompletion { - completion: EditPrediction, - feedback_editor: Entity, -} - -#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] -enum RateCompletionView { - SuggestedEdits, - RawInput, -} - -impl RateCompletionView { - pub fn name(&self) -> &'static str { - match self { - Self::SuggestedEdits => "Suggested Edits", - Self::RawInput => "Recorded Events & Input", - } - } -} - -impl RateCompletionModal { - pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context) { - if let Some(zeta) = Zeta::global(cx) { - workspace.toggle_modal(window, cx, |_window, cx| RateCompletionModal::new(zeta, cx)); - - telemetry::event!("Rate Completion Modal Open", source = "Edit Prediction"); - } - } - - pub fn new(zeta: Entity, cx: &mut Context) -> Self { - let subscription = cx.observe(&zeta, |_, _, cx| cx.notify()); - - Self { - zeta, - selected_index: 0, - focus_handle: cx.focus_handle(), - active_completion: None, - _subscription: subscription, - current_view: RateCompletionView::SuggestedEdits, - } - } - - fn dismiss(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { - cx.emit(DismissEvent); - } - - fn select_next(&mut self, _: &menu::SelectNext, _: &mut Window, cx: &mut Context) { - self.selected_index += 1; - self.selected_index = usize::min( - self.selected_index, - self.zeta.read(cx).shown_completions().count(), - ); - cx.notify(); - } - - fn select_previous( - &mut self, - _: &menu::SelectPrevious, - _: &mut Window, - cx: &mut Context, - ) { - self.selected_index = self.selected_index.saturating_sub(1); - cx.notify(); - } - - fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { - let next_index = self - .zeta - .read(cx) - .shown_completions() - .skip(self.selected_index) - .enumerate() - .skip(1) // Skip straight to the next item - .find(|(_, completion)| !completion.edits.is_empty()) - .map(|(ix, _)| ix + self.selected_index); - - if let Some(next_index) = next_index { - self.selected_index = next_index; - cx.notify(); - } - } - - fn select_prev_edit(&mut self, _: &PreviousEdit, _: &mut Window, cx: &mut Context) { - let zeta = self.zeta.read(cx); - let completions_len = zeta.shown_completions_len(); - - let prev_index = self - .zeta - .read(cx) - .shown_completions() - .rev() - .skip((completions_len - 1) - self.selected_index) - .enumerate() - .skip(1) // Skip straight to the previous item - .find(|(_, completion)| !completion.edits.is_empty()) - .map(|(ix, _)| self.selected_index - ix); - - if let Some(prev_index) = prev_index { - self.selected_index = prev_index; - cx.notify(); - } - cx.notify(); - } - - fn select_first(&mut self, _: &menu::SelectFirst, _: &mut Window, cx: &mut Context) { - self.selected_index = 0; - cx.notify(); - } - - fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { - self.selected_index = self.zeta.read(cx).shown_completions_len() - 1; - cx.notify(); - } - - pub fn thumbs_up_active( - &mut self, - _: &ThumbsUpActiveCompletion, - window: &mut 
Window, - cx: &mut Context, - ) { - self.zeta.update(cx, |zeta, cx| { - if let Some(active) = &self.active_completion { - zeta.rate_completion( - &active.completion, - EditPredictionRating::Positive, - active.feedback_editor.read(cx).text(cx), - cx, - ); - } - }); - - let current_completion = self - .active_completion - .as_ref() - .map(|completion| completion.completion.clone()); - self.select_completion(current_completion, false, window, cx); - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - - cx.notify(); - } - - pub fn thumbs_down_active( - &mut self, - _: &ThumbsDownActiveCompletion, - window: &mut Window, - cx: &mut Context, - ) { - if let Some(active) = &self.active_completion { - if active.feedback_editor.read(cx).text(cx).is_empty() { - return; - } - - self.zeta.update(cx, |zeta, cx| { - zeta.rate_completion( - &active.completion, - EditPredictionRating::Negative, - active.feedback_editor.read(cx).text(cx), - cx, - ); - }); - } - - let current_completion = self - .active_completion - .as_ref() - .map(|completion| completion.completion.clone()); - self.select_completion(current_completion, false, window, cx); - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - - cx.notify(); - } - - fn focus_completions( - &mut self, - _: &FocusCompletions, - window: &mut Window, - cx: &mut Context, - ) { - cx.focus_self(window); - cx.notify(); - } - - fn preview_completion( - &mut self, - _: &PreviewCompletion, - window: &mut Window, - cx: &mut Context, - ) { - let completion = self - .zeta - .read(cx) - .shown_completions() - .skip(self.selected_index) - .take(1) - .next() - .cloned(); - - self.select_completion(completion, false, window, cx); - } - - fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { - let completion = self - .zeta - .read(cx) - .shown_completions() - .skip(self.selected_index) - .take(1) - .next() - .cloned(); - - self.select_completion(completion, true, window, cx); - } - - pub fn select_completion( - &mut self, - completion: Option, - focus: bool, - window: &mut Window, - cx: &mut Context, - ) { - // Avoid resetting completion rating if it's already selected. 
- if let Some(completion) = completion.as_ref() { - self.selected_index = self - .zeta - .read(cx) - .shown_completions() - .enumerate() - .find(|(_, completion_b)| completion.id == completion_b.id) - .map(|(ix, _)| ix) - .unwrap_or(self.selected_index); - cx.notify(); - - if let Some(prev_completion) = self.active_completion.as_ref() - && completion.id == prev_completion.completion.id - { - if focus { - window.focus(&prev_completion.feedback_editor.focus_handle(cx)); - } - return; - } - } - - self.active_completion = completion.map(|completion| ActiveCompletion { - completion, - feedback_editor: cx.new(|cx| { - let mut editor = Editor::multi_line(window, cx); - editor.disable_scrollbars_and_minimap(window, cx); - editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx); - editor.set_show_line_numbers(false, cx); - editor.set_show_git_diff_gutter(false, cx); - editor.set_show_code_actions(false, cx); - editor.set_show_runnables(false, cx); - editor.set_show_breakpoints(false, cx); - editor.set_show_wrap_guides(false, cx); - editor.set_show_indent_guides(false, cx); - editor.set_show_edit_predictions(Some(false), window, cx); - editor.set_placeholder_text("Add your feedback…", window, cx); - if focus { - cx.focus_self(window); - } - editor - }), - }); - cx.notify(); - } - - fn render_view_nav(&self, cx: &Context) -> impl IntoElement { - h_flex() - .h_8() - .px_1() - .border_b_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().elevated_surface_background) - .gap_1() - .child( - Button::new( - ElementId::Name("suggested-edits".into()), - RateCompletionView::SuggestedEdits.name(), - ) - .label_size(LabelSize::Small) - .on_click(cx.listener(move |this, _, _window, cx| { - this.current_view = RateCompletionView::SuggestedEdits; - cx.notify(); - })) - .toggle_state(self.current_view == RateCompletionView::SuggestedEdits), - ) - .child( - Button::new( - ElementId::Name("raw-input".into()), - RateCompletionView::RawInput.name(), - ) - .label_size(LabelSize::Small) - .on_click(cx.listener(move |this, _, _window, cx| { - this.current_view = RateCompletionView::RawInput; - cx.notify(); - })) - .toggle_state(self.current_view == RateCompletionView::RawInput), - ) - } - - fn render_suggested_edits(&self, cx: &mut Context) -> Option> { - let active_completion = self.active_completion.as_ref()?; - let bg_color = cx.theme().colors().editor_background; - - Some( - div() - .id("diff") - .p_4() - .size_full() - .bg(bg_color) - .overflow_scroll() - .whitespace_nowrap() - .child(CompletionDiffElement::new( - &active_completion.completion, - cx, - )), - ) - } - - fn render_raw_input(&self, cx: &mut Context) -> Option> { - Some( - v_flex() - .size_full() - .overflow_hidden() - .relative() - .child( - div() - .id("raw-input") - .py_4() - .px_6() - .size_full() - .bg(cx.theme().colors().editor_background) - .overflow_scroll() - .child(if let Some(active_completion) = &self.active_completion { - format!( - "{}\n{}", - active_completion.completion.input_events, - active_completion.completion.input_excerpt - ) - } else { - "No active completion".to_string() - }), - ) - .id("raw-input-view"), - ) - } - - fn render_active_completion(&mut self, cx: &mut Context) -> Option { - let active_completion = self.active_completion.as_ref()?; - let completion_id = active_completion.completion.id; - let focus_handle = &self.focus_handle(cx); - - let border_color = cx.theme().colors().border; - let bg_color = cx.theme().colors().editor_background; - - let rated = 
self.zeta.read(cx).is_completion_rated(completion_id); - let feedback_empty = active_completion - .feedback_editor - .read(cx) - .text(cx) - .is_empty(); - - let label_container = h_flex().pl_1().gap_1p5(); - - Some( - v_flex() - .size_full() - .overflow_hidden() - .relative() - .child( - v_flex() - .size_full() - .overflow_hidden() - .relative() - .child(self.render_view_nav(cx)) - .when_some(match self.current_view { - RateCompletionView::SuggestedEdits => self.render_suggested_edits(cx), - RateCompletionView::RawInput => self.render_raw_input(cx), - }, |this, element| this.child(element)) - ) - .when(!rated, |this| { - this.child( - h_flex() - .p_2() - .gap_2() - .border_y_1() - .border_color(border_color) - .child( - Icon::new(IconName::Info) - .size(IconSize::XSmall) - .color(Color::Muted) - ) - .child( - div() - .w_full() - .pr_2() - .flex_wrap() - .child( - Label::new("Explain why this completion is good or bad. If it's negative, describe what you expected instead.") - .size(LabelSize::Small) - .color(Color::Muted) - ) - ) - ) - }) - .when(!rated, |this| { - this.child( - div() - .h_40() - .pt_1() - .bg(bg_color) - .child(active_completion.feedback_editor.clone()) - ) - }) - .child( - h_flex() - .p_1() - .h_8() - .max_h_8() - .border_t_1() - .border_color(border_color) - .max_w_full() - .justify_between() - .children(if rated { - Some( - label_container - .child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Success), - ) - .child(Label::new("Rated completion.").color(Color::Muted)), - ) - } else if active_completion.completion.edits.is_empty() { - Some( - label_container - .child( - Icon::new(IconName::Warning) - .size(IconSize::Small) - .color(Color::Warning), - ) - .child(Label::new("No edits produced.").color(Color::Muted)), - ) - } else { - Some(label_container) - }) - .child( - h_flex() - .gap_1() - .child( - Button::new("bad", "Bad Completion") - .icon(IconName::ThumbsDown) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .disabled(rated || feedback_empty) - .when(feedback_empty, |this| { - this.tooltip(Tooltip::text("Explain what's bad about it before reporting it")) - }) - .key_binding(KeyBinding::for_action_in( - &ThumbsDownActiveCompletion, - focus_handle, - cx - )) - .on_click(cx.listener(move |this, _, window, cx| { - if this.active_completion.is_some() { - this.thumbs_down_active( - &ThumbsDownActiveCompletion, - window, cx, - ); - } - })), - ) - .child( - Button::new("good", "Good Completion") - .icon(IconName::ThumbsUp) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .disabled(rated) - .key_binding(KeyBinding::for_action_in( - &ThumbsUpActiveCompletion, - focus_handle, - cx - )) - .on_click(cx.listener(move |this, _, window, cx| { - if this.active_completion.is_some() { - this.thumbs_up_active(&ThumbsUpActiveCompletion, window, cx); - } - })), - ), - ), - ), - ) - } -} - -impl Render for RateCompletionModal { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let border_color = cx.theme().colors().border; - - h_flex() - .key_context("RateCompletionModal") - .track_focus(&self.focus_handle) - .on_action(cx.listener(Self::dismiss)) - .on_action(cx.listener(Self::confirm)) - .on_action(cx.listener(Self::select_previous)) - .on_action(cx.listener(Self::select_prev_edit)) - .on_action(cx.listener(Self::select_next)) - .on_action(cx.listener(Self::select_next_edit)) - .on_action(cx.listener(Self::select_first)) - .on_action(cx.listener(Self::select_last)) - 
.on_action(cx.listener(Self::thumbs_up_active)) - .on_action(cx.listener(Self::thumbs_down_active)) - .on_action(cx.listener(Self::focus_completions)) - .on_action(cx.listener(Self::preview_completion)) - .bg(cx.theme().colors().elevated_surface_background) - .border_1() - .border_color(border_color) - .w(window.viewport_size().width - px(320.)) - .h(window.viewport_size().height - px(300.)) - .rounded_lg() - .shadow_lg() - .child( - v_flex() - .w_72() - .h_full() - .border_r_1() - .border_color(border_color) - .flex_shrink_0() - .overflow_hidden() - .child( - h_flex() - .h_8() - .px_2() - .justify_between() - .border_b_1() - .border_color(border_color) - .child( - Icon::new(IconName::ZedPredict) - .size(IconSize::Small) - ) - .child( - Label::new("From most recent to oldest") - .color(Color::Muted) - .size(LabelSize::Small), - ) - ) - .child( - div() - .id("completion_list") - .p_0p5() - .h_full() - .overflow_y_scroll() - .child( - List::new() - .empty_message( - div() - .p_2() - .child( - Label::new("No completions yet. Use the editor to generate some, and make sure to rate them!") - .color(Color::Muted), - ) - .into_any_element(), - ) - .children(self.zeta.read(cx).shown_completions().cloned().enumerate().map( - |(index, completion)| { - let selected = - self.active_completion.as_ref().is_some_and(|selected| { - selected.completion.id == completion.id - }); - let rated = - self.zeta.read(cx).is_completion_rated(completion.id); - - let (icon_name, icon_color, tooltip_text) = match (rated, completion.edits.is_empty()) { - (true, _) => (IconName::Check, Color::Success, "Rated Completion"), - (false, true) => (IconName::File, Color::Muted, "No Edits Produced"), - (false, false) => (IconName::FileDiff, Color::Accent, "Edits Available"), - }; - - let file_name = completion.path.file_name().map(|f| f.to_string_lossy().into_owned()).unwrap_or("untitled".to_string()); - let file_path = completion.path.parent().map(|p| p.to_string_lossy().into_owned()); - - ListItem::new(completion.id) - .inset(true) - .spacing(ListItemSpacing::Sparse) - .focused(index == self.selected_index) - .toggle_state(selected) - .child( - h_flex() - .id("completion-content") - .gap_3() - .child( - Icon::new(icon_name) - .color(icon_color) - .size(IconSize::Small) - ) - .child( - v_flex() - .child( - h_flex().gap_1() - .child(Label::new(file_name).size(LabelSize::Small)) - .when_some(file_path, |this, p| this.child(Label::new(p).size(LabelSize::Small).color(Color::Muted))) - ) - .child(Label::new(format!("{} ago, {:.2?}", format_time_ago(completion.response_received_at.elapsed()), completion.latency())) - .color(Color::Muted) - .size(LabelSize::XSmall) - ) - ) - ) - .tooltip(Tooltip::text(tooltip_text)) - .on_click(cx.listener(move |this, _, window, cx| { - this.select_completion(Some(completion.clone()), true, window, cx); - })) - }, - )), - ) - ), - ) - .children(self.render_active_completion( cx)) - .on_mouse_down_out(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))) - } -} - -impl EventEmitter for RateCompletionModal {} - -impl Focusable for RateCompletionModal { - fn focus_handle(&self, _cx: &App) -> FocusHandle { - self.focus_handle.clone() - } -} - -impl ModalView for RateCompletionModal {} - -fn format_time_ago(elapsed: Duration) -> String { - let seconds = elapsed.as_secs(); - if seconds < 120 { - "1 minute".to_string() - } else if seconds < 3600 { - format!("{} minutes", seconds / 60) - } else if seconds < 7200 { - "1 hour".to_string() - } else if seconds < 86400 { - format!("{} hours", seconds / 3600) - } else 
if seconds < 172800 { - "1 day".to_string() - } else { - format!("{} days", seconds / 86400) - } -} diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs deleted file mode 100644 index 708a53ff47bd2c60e6b9620e8bed30b16419ba14..0000000000000000000000000000000000000000 --- a/crates/zeta/src/zeta.rs +++ /dev/null @@ -1,2245 +0,0 @@ -mod completion_diff_element; -mod init; -mod input_excerpt; -mod license_detection; -mod onboarding_modal; -mod onboarding_telemetry; -mod rate_completion_modal; - -pub(crate) use completion_diff_element::*; -use db::kvp::{Dismissable, KEY_VALUE_STORE}; -use edit_prediction::DataCollectionState; -pub use init::*; -use license_detection::LicenseDetectionWatcher; -pub use rate_completion_modal::*; - -use anyhow::{Context as _, Result, anyhow}; -use arrayvec::ArrayVec; -use client::{Client, EditPredictionUsage, UserStore}; -use cloud_llm_client::{ - AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, - PredictEditsBody, PredictEditsGitInfo, PredictEditsResponse, ZED_VERSION_HEADER_NAME, -}; -use collections::{HashMap, HashSet, VecDeque}; -use futures::AsyncReadExt; -use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EntityId, Global, SemanticVersion, - SharedString, Subscription, Task, actions, -}; -use http_client::{AsyncBody, HttpClient, Method, Request, Response}; -use input_excerpt::excerpt_for_cursor_position; -use language::{ - Anchor, Buffer, BufferSnapshot, EditPreview, File, OffsetRangeExt, ToOffset, ToPoint, text_diff, -}; -use language_model::{LlmApiToken, RefreshLlmTokenListener}; -use project::{Project, ProjectPath}; -use release_channel::AppVersion; -use settings::WorktreeId; -use std::collections::hash_map; -use std::mem; -use std::str::FromStr; -use std::{ - cmp, - fmt::Write, - future::Future, - ops::Range, - path::Path, - rc::Rc, - sync::Arc, - time::{Duration, Instant}, -}; -use telemetry_events::EditPredictionRating; -use thiserror::Error; -use util::ResultExt; -use util::rel_path::RelPath; -use uuid::Uuid; -use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; -use worktree::Worktree; - -const CURSOR_MARKER: &str = "<|user_cursor_is_here|>"; -const START_OF_FILE_MARKER: &str = "<|start_of_file|>"; -const EDITABLE_REGION_START_MARKER: &str = "<|editable_region_start|>"; -const EDITABLE_REGION_END_MARKER: &str = "<|editable_region_end|>"; -const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1); -const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice"; - -const MAX_CONTEXT_TOKENS: usize = 150; -const MAX_REWRITE_TOKENS: usize = 350; -const MAX_EVENT_TOKENS: usize = 500; - -/// Maximum number of events to track. -const MAX_EVENT_COUNT: usize = 16; - -actions!( - edit_prediction, - [ - /// Clears the edit prediction history. 
- ClearHistory - ] -); - -#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)] -pub struct EditPredictionId(Uuid); - -impl From for gpui::ElementId { - fn from(value: EditPredictionId) -> Self { - gpui::ElementId::Uuid(value.0) - } -} - -impl std::fmt::Display for EditPredictionId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -struct ZedPredictUpsell; - -impl Dismissable for ZedPredictUpsell { - const KEY: &'static str = "dismissed-edit-predict-upsell"; - - fn dismissed() -> bool { - // To make this backwards compatible with older versions of Zed, we - // check if the user has seen the previous Edit Prediction Onboarding - // before, by checking the data collection choice which was written to - // the database once the user clicked on "Accept and Enable" - if KEY_VALUE_STORE - .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE) - .log_err() - .is_some_and(|s| s.is_some()) - { - return true; - } - - KEY_VALUE_STORE - .read_kvp(Self::KEY) - .log_err() - .is_some_and(|s| s.is_some()) - } -} - -pub fn should_show_upsell_modal() -> bool { - !ZedPredictUpsell::dismissed() -} - -#[derive(Clone)] -struct ZetaGlobal(Entity); - -impl Global for ZetaGlobal {} - -#[derive(Clone)] -pub struct EditPrediction { - id: EditPredictionId, - path: Arc, - excerpt_range: Range, - cursor_offset: usize, - edits: Arc<[(Range, Arc)]>, - snapshot: BufferSnapshot, - edit_preview: EditPreview, - input_outline: Arc, - input_events: Arc, - input_excerpt: Arc, - output_excerpt: Arc, - buffer_snapshotted_at: Instant, - response_received_at: Instant, -} - -impl EditPrediction { - fn latency(&self) -> Duration { - self.response_received_at - .duration_since(self.buffer_snapshotted_at) - } - - fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option, Arc)>> { - edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits) - } -} - -impl std::fmt::Debug for EditPrediction { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("EditPrediction") - .field("id", &self.id) - .field("path", &self.path) - .field("edits", &self.edits) - .finish_non_exhaustive() - } -} - -pub struct Zeta { - projects: HashMap, - client: Arc, - shown_completions: VecDeque, - rated_completions: HashSet, - data_collection_choice: DataCollectionChoice, - llm_token: LlmApiToken, - _llm_token_subscription: Subscription, - /// Whether an update to a newer version of Zed is required to continue using Zeta. 
- update_required: bool, - user_store: Entity, - license_detection_watchers: HashMap>, -} - -struct ZetaProject { - events: VecDeque, - registered_buffers: HashMap, -} - -impl Zeta { - pub fn global(cx: &mut App) -> Option> { - cx.try_global::().map(|global| global.0.clone()) - } - - pub fn register( - worktree: Option>, - client: Arc, - user_store: Entity, - cx: &mut App, - ) -> Entity { - let this = Self::global(cx).unwrap_or_else(|| { - let entity = cx.new(|cx| Self::new(client, user_store, cx)); - cx.set_global(ZetaGlobal(entity.clone())); - entity - }); - - this.update(cx, move |this, cx| { - if let Some(worktree) = worktree { - let worktree_id = worktree.read(cx).id(); - this.license_detection_watchers - .entry(worktree_id) - .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx))); - } - }); - - this - } - - pub fn clear_history(&mut self) { - for zeta_project in self.projects.values_mut() { - zeta_project.events.clear(); - } - } - - pub fn usage(&self, cx: &App) -> Option { - self.user_store.read(cx).edit_prediction_usage() - } - - fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let data_collection_choice = Self::load_data_collection_choice(); - Self { - projects: HashMap::default(), - client, - shown_completions: VecDeque::new(), - rated_completions: HashSet::default(), - data_collection_choice, - llm_token: LlmApiToken::default(), - _llm_token_subscription: cx.subscribe( - &refresh_llm_token_listener, - |this, _listener, _event, cx| { - let client = this.client.clone(); - let llm_token = this.llm_token.clone(); - cx.spawn(async move |_this, _cx| { - llm_token.refresh(&client).await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }, - ), - update_required: false, - license_detection_watchers: HashMap::default(), - user_store, - } - } - - fn get_or_init_zeta_project( - &mut self, - project: &Entity, - cx: &mut Context, - ) -> &mut ZetaProject { - let project_id = project.entity_id(); - match self.projects.entry(project_id) { - hash_map::Entry::Occupied(entry) => entry.into_mut(), - hash_map::Entry::Vacant(entry) => { - cx.observe_release(project, move |this, _, _cx| { - this.projects.remove(&project_id); - }) - .detach(); - entry.insert(ZetaProject { - events: VecDeque::with_capacity(MAX_EVENT_COUNT), - registered_buffers: HashMap::default(), - }) - } - } - } - - fn push_event(zeta_project: &mut ZetaProject, event: Event) { - let events = &mut zeta_project.events; - - if let Some(Event::BufferChange { - new_snapshot: last_new_snapshot, - timestamp: last_timestamp, - .. - }) = events.back_mut() - { - // Coalesce edits for the same buffer when they happen one after the other. - let Event::BufferChange { - old_snapshot, - new_snapshot, - timestamp, - } = &event; - - if timestamp.duration_since(*last_timestamp) <= BUFFER_CHANGE_GROUPING_INTERVAL - && old_snapshot.remote_id() == last_new_snapshot.remote_id() - && old_snapshot.version == last_new_snapshot.version - { - *last_new_snapshot = new_snapshot.clone(); - *last_timestamp = *timestamp; - return; - } - } - - if events.len() >= MAX_EVENT_COUNT { - // These are halved instead of popping to improve prompt caching. 
- events.drain(..MAX_EVENT_COUNT / 2); - } - - events.push_back(event); - } - - pub fn register_buffer( - &mut self, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) { - let zeta_project = self.get_or_init_zeta_project(project, cx); - Self::register_buffer_impl(zeta_project, buffer, project, cx); - } - - fn register_buffer_impl<'a>( - zeta_project: &'a mut ZetaProject, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) -> &'a mut RegisteredBuffer { - let buffer_id = buffer.entity_id(); - match zeta_project.registered_buffers.entry(buffer_id) { - hash_map::Entry::Occupied(entry) => entry.into_mut(), - hash_map::Entry::Vacant(entry) => { - let snapshot = buffer.read(cx).snapshot(); - let project_entity_id = project.entity_id(); - entry.insert(RegisteredBuffer { - snapshot, - _subscriptions: [ - cx.subscribe(buffer, { - let project = project.downgrade(); - move |this, buffer, event, cx| { - if let language::BufferEvent::Edited = event - && let Some(project) = project.upgrade() - { - this.report_changes_for_buffer(&buffer, &project, cx); - } - } - }), - cx.observe_release(buffer, move |this, _buffer, _cx| { - let Some(zeta_project) = this.projects.get_mut(&project_entity_id) - else { - return; - }; - zeta_project.registered_buffers.remove(&buffer_id); - }), - ], - }) - } - } - } - - fn request_completion_impl( - &mut self, - project: &Entity, - buffer: &Entity, - cursor: language::Anchor, - cx: &mut Context, - perform_predict_edits: F, - ) -> Task>> - where - F: FnOnce(PerformPredictEditsParams) -> R + 'static, - R: Future)>> - + Send - + 'static, - { - let buffer = buffer.clone(); - let buffer_snapshotted_at = Instant::now(); - let snapshot = self.report_changes_for_buffer(&buffer, project, cx); - let zeta = cx.entity(); - let client = self.client.clone(); - let llm_token = self.llm_token.clone(); - let app_version = AppVersion::global(cx); - - let zeta_project = self.get_or_init_zeta_project(project, cx); - let mut events = Vec::with_capacity(zeta_project.events.len()); - events.extend(zeta_project.events.iter().cloned()); - let events = Arc::new(events); - - let (git_info, can_collect_file) = if let Some(file) = snapshot.file() { - let can_collect_file = self.can_collect_file(file, cx); - let git_info = if can_collect_file { - git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx) - } else { - None - }; - (git_info, can_collect_file) - } else { - (None, false) - }; - - let full_path: Arc = snapshot - .file() - .map(|f| Arc::from(f.full_path(cx).as_path())) - .unwrap_or_else(|| Arc::from(Path::new("untitled"))); - let full_path_str = full_path.to_string_lossy().into_owned(); - let cursor_point = cursor.to_point(&snapshot); - let cursor_offset = cursor_point.to_offset(&snapshot); - let prompt_for_events = { - let events = events.clone(); - move || prompt_for_events_impl(&events, MAX_EVENT_TOKENS) - }; - let gather_task = gather_context( - full_path_str, - &snapshot, - cursor_point, - prompt_for_events, - cx, - ); - - cx.spawn(async move |this, cx| { - let GatherContextOutput { - mut body, - editable_range, - included_events_count, - } = gather_task.await?; - let done_gathering_context_at = Instant::now(); - - let included_events = &events[events.len() - included_events_count..events.len()]; - body.can_collect_data = can_collect_file - && this - .read_with(cx, |this, cx| this.can_collect_events(included_events, cx)) - .unwrap_or(false); - if body.can_collect_data { - body.git_info = git_info; - } - - log::debug!( - "Events:\n{}\nExcerpt:\n{:?}", - 
body.input_events, - body.input_excerpt - ); - - let input_outline = body.outline.clone().unwrap_or_default(); - let input_events = body.input_events.clone(); - let input_excerpt = body.input_excerpt.clone(); - - let response = perform_predict_edits(PerformPredictEditsParams { - client, - llm_token, - app_version, - body, - }) - .await; - let (response, usage) = match response { - Ok(response) => response, - Err(err) => { - if err.is::() { - cx.update(|cx| { - zeta.update(cx, |zeta, _cx| { - zeta.update_required = true; - }); - - let error_message: SharedString = err.to_string().into(); - show_app_notification( - NotificationId::unique::(), - cx, - move |cx| { - cx.new(|cx| { - ErrorMessagePrompt::new(error_message.clone(), cx) - .with_link_button( - "Update Zed", - "https://zed.dev/releases", - ) - }) - }, - ); - }) - .ok(); - } - - return Err(err); - } - }; - - let received_response_at = Instant::now(); - log::debug!("completion response: {}", &response.output_excerpt); - - if let Some(usage) = usage { - this.update(cx, |this, cx| { - this.user_store.update(cx, |user_store, cx| { - user_store.update_edit_prediction_usage(usage, cx); - }); - }) - .ok(); - } - - let edit_prediction = Self::process_completion_response( - response, - buffer, - &snapshot, - editable_range, - cursor_offset, - full_path, - input_outline, - input_events, - input_excerpt, - buffer_snapshotted_at, - cx, - ) - .await; - - let finished_at = Instant::now(); - - // record latency for ~1% of requests - if rand::random::() <= 2 { - telemetry::event!( - "Edit Prediction Request", - context_latency = done_gathering_context_at - .duration_since(buffer_snapshotted_at) - .as_millis(), - request_latency = received_response_at - .duration_since(done_gathering_context_at) - .as_millis(), - process_latency = finished_at.duration_since(received_response_at).as_millis() - ); - } - - edit_prediction - }) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn fake_completion( - &mut self, - project: &Entity, - buffer: &Entity, - position: language::Anchor, - response: PredictEditsResponse, - cx: &mut Context, - ) -> Task>> { - self.request_completion_impl(project, buffer, position, cx, |_params| { - std::future::ready(Ok((response, None))) - }) - } - - pub fn request_completion( - &mut self, - project: &Entity, - buffer: &Entity, - position: language::Anchor, - cx: &mut Context, - ) -> Task>> { - self.request_completion_impl(project, buffer, position, cx, Self::perform_predict_edits) - } - - pub fn perform_predict_edits( - params: PerformPredictEditsParams, - ) -> impl Future)>> { - async move { - let PerformPredictEditsParams { - client, - llm_token, - app_version, - body, - .. - } = params; - - let http_client = client.http_client(); - let mut token = llm_token.acquire(&client).await?; - let mut did_retry = false; - - loop { - let request_builder = http_client::Request::builder().method(Method::POST); - let request_builder = - if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") { - request_builder.uri(predict_edits_url) - } else { - request_builder.uri( - http_client - .build_zed_llm_url("/predict_edits/v2", &[])? 
- .as_ref(), - ) - }; - let request = request_builder - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", token)) - .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) - .body(serde_json::to_string(&body)?.into())?; - - let mut response = http_client.send(request).await?; - - if let Some(minimum_required_version) = response - .headers() - .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME) - .and_then(|version| SemanticVersion::from_str(version.to_str().ok()?).ok()) - { - anyhow::ensure!( - app_version >= minimum_required_version, - ZedUpdateRequiredError { - minimum_version: minimum_required_version - } - ); - } - - if response.status().is_success() { - let usage = EditPredictionUsage::from_headers(response.headers()).ok(); - - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - return Ok((serde_json::from_str(&body)?, usage)); - } else if !did_retry - && response - .headers() - .get(EXPIRED_LLM_TOKEN_HEADER_NAME) - .is_some() - { - did_retry = true; - token = llm_token.refresh(&client).await?; - } else { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - anyhow::bail!( - "error predicting edits.\nStatus: {:?}\nBody: {}", - response.status(), - body - ); - } - } - } - } - - fn accept_edit_prediction( - &mut self, - request_id: EditPredictionId, - cx: &mut Context, - ) -> Task> { - let client = self.client.clone(); - let llm_token = self.llm_token.clone(); - let app_version = AppVersion::global(cx); - cx.spawn(async move |this, cx| { - let http_client = client.http_client(); - let mut response = llm_token_retry(&llm_token, &client, |token| { - let request_builder = http_client::Request::builder().method(Method::POST); - let request_builder = - if let Ok(accept_prediction_url) = std::env::var("ZED_ACCEPT_PREDICTION_URL") { - request_builder.uri(accept_prediction_url) - } else { - request_builder.uri( - http_client - .build_zed_llm_url("/predict_edits/accept", &[])? - .as_ref(), - ) - }; - Ok(request_builder - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", token)) - .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) - .body( - serde_json::to_string(&AcceptEditPredictionBody { - request_id: request_id.0.to_string(), - })? - .into(), - )?) 
- }) - .await?; - - if let Some(minimum_required_version) = response - .headers() - .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME) - .and_then(|version| SemanticVersion::from_str(version.to_str().ok()?).ok()) - && app_version < minimum_required_version - { - return Err(anyhow!(ZedUpdateRequiredError { - minimum_version: minimum_required_version - })); - } - - if response.status().is_success() { - if let Some(usage) = EditPredictionUsage::from_headers(response.headers()).ok() { - this.update(cx, |this, cx| { - this.user_store.update(cx, |user_store, cx| { - user_store.update_edit_prediction_usage(usage, cx); - }); - })?; - } - - Ok(()) - } else { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - Err(anyhow!( - "error accepting edit prediction.\nStatus: {:?}\nBody: {}", - response.status(), - body - )) - } - }) - } - - fn process_completion_response( - prediction_response: PredictEditsResponse, - buffer: Entity, - snapshot: &BufferSnapshot, - editable_range: Range, - cursor_offset: usize, - path: Arc, - input_outline: String, - input_events: String, - input_excerpt: String, - buffer_snapshotted_at: Instant, - cx: &AsyncApp, - ) -> Task>> { - let snapshot = snapshot.clone(); - let request_id = prediction_response.request_id; - let output_excerpt = prediction_response.output_excerpt; - cx.spawn(async move |cx| { - let output_excerpt: Arc = output_excerpt.into(); - - let edits: Arc<[(Range, Arc)]> = cx - .background_spawn({ - let output_excerpt = output_excerpt.clone(); - let editable_range = editable_range.clone(); - let snapshot = snapshot.clone(); - async move { Self::parse_edits(output_excerpt, editable_range, &snapshot) } - }) - .await? - .into(); - - let Some((edits, snapshot, edit_preview)) = buffer.read_with(cx, { - let edits = edits.clone(); - move |buffer, cx| { - let new_snapshot = buffer.snapshot(); - let edits: Arc<[(Range, Arc)]> = - edit_prediction::interpolate_edits(&snapshot, &new_snapshot, &edits)? - .into(); - Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx))) - } - })? 
- else { - return anyhow::Ok(None); - }; - - let request_id = Uuid::from_str(&request_id).context("failed to parse request id")?; - - let edit_preview = edit_preview.await; - - Ok(Some(EditPrediction { - id: EditPredictionId(request_id), - path, - excerpt_range: editable_range, - cursor_offset, - edits, - edit_preview, - snapshot, - input_outline: input_outline.into(), - input_events: input_events.into(), - input_excerpt: input_excerpt.into(), - output_excerpt, - buffer_snapshotted_at, - response_received_at: Instant::now(), - })) - }) - } - - fn parse_edits( - output_excerpt: Arc, - editable_range: Range, - snapshot: &BufferSnapshot, - ) -> Result, Arc)>> { - let content = output_excerpt.replace(CURSOR_MARKER, ""); - - let start_markers = content - .match_indices(EDITABLE_REGION_START_MARKER) - .collect::>(); - anyhow::ensure!( - start_markers.len() == 1, - "expected exactly one start marker, found {}", - start_markers.len() - ); - - let end_markers = content - .match_indices(EDITABLE_REGION_END_MARKER) - .collect::>(); - anyhow::ensure!( - end_markers.len() == 1, - "expected exactly one end marker, found {}", - end_markers.len() - ); - - let sof_markers = content - .match_indices(START_OF_FILE_MARKER) - .collect::>(); - anyhow::ensure!( - sof_markers.len() <= 1, - "expected at most one start-of-file marker, found {}", - sof_markers.len() - ); - - let codefence_start = start_markers[0].0; - let content = &content[codefence_start..]; - - let newline_ix = content.find('\n').context("could not find newline")?; - let content = &content[newline_ix + 1..]; - - let codefence_end = content - .rfind(&format!("\n{EDITABLE_REGION_END_MARKER}")) - .context("could not find end marker")?; - let new_text = &content[..codefence_end]; - - let old_text = snapshot - .text_for_range(editable_range.clone()) - .collect::(); - - Ok(Self::compute_edits( - old_text, - new_text, - editable_range.start, - snapshot, - )) - } - - pub fn compute_edits( - old_text: String, - new_text: &str, - offset: usize, - snapshot: &BufferSnapshot, - ) -> Vec<(Range, Arc)> { - text_diff(&old_text, new_text) - .into_iter() - .map(|(mut old_range, new_text)| { - old_range.start += offset; - old_range.end += offset; - - let prefix_len = common_prefix( - snapshot.chars_for_range(old_range.clone()), - new_text.chars(), - ); - old_range.start += prefix_len; - - let suffix_len = common_prefix( - snapshot.reversed_chars_for_range(old_range.clone()), - new_text[prefix_len..].chars().rev(), - ); - old_range.end = old_range.end.saturating_sub(suffix_len); - - let new_text = new_text[prefix_len..new_text.len() - suffix_len].into(); - let range = if old_range.is_empty() { - let anchor = snapshot.anchor_after(old_range.start); - anchor..anchor - } else { - snapshot.anchor_after(old_range.start)..snapshot.anchor_before(old_range.end) - }; - (range, new_text) - }) - .collect() - } - - pub fn is_completion_rated(&self, completion_id: EditPredictionId) -> bool { - self.rated_completions.contains(&completion_id) - } - - pub fn completion_shown(&mut self, completion: &EditPrediction, cx: &mut Context) { - self.shown_completions.push_front(completion.clone()); - if self.shown_completions.len() > 50 { - let completion = self.shown_completions.pop_back().unwrap(); - self.rated_completions.remove(&completion.id); - } - cx.notify(); - } - - pub fn rate_completion( - &mut self, - completion: &EditPrediction, - rating: EditPredictionRating, - feedback: String, - cx: &mut Context, - ) { - self.rated_completions.insert(completion.id); - telemetry::event!( - 
"Edit Prediction Rated", - rating, - input_events = completion.input_events, - input_excerpt = completion.input_excerpt, - input_outline = completion.input_outline, - output_excerpt = completion.output_excerpt, - feedback - ); - self.client.telemetry().flush_events().detach(); - cx.notify(); - } - - pub fn shown_completions(&self) -> impl DoubleEndedIterator { - self.shown_completions.iter() - } - - pub fn shown_completions_len(&self) -> usize { - self.shown_completions.len() - } - - fn report_changes_for_buffer( - &mut self, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) -> BufferSnapshot { - let zeta_project = self.get_or_init_zeta_project(project, cx); - let registered_buffer = Self::register_buffer_impl(zeta_project, buffer, project, cx); - - let new_snapshot = buffer.read(cx).snapshot(); - if new_snapshot.version != registered_buffer.snapshot.version { - let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone()); - Self::push_event( - zeta_project, - Event::BufferChange { - old_snapshot, - new_snapshot: new_snapshot.clone(), - timestamp: Instant::now(), - }, - ); - } - - new_snapshot - } - - fn can_collect_file(&self, file: &Arc, cx: &App) -> bool { - self.data_collection_choice.is_enabled() && self.is_file_open_source(file, cx) - } - - fn can_collect_events(&self, events: &[Event], cx: &App) -> bool { - if !self.data_collection_choice.is_enabled() { - return false; - } - let mut last_checked_file = None; - for event in events { - match event { - Event::BufferChange { - old_snapshot, - new_snapshot, - .. - } => { - if let Some(old_file) = old_snapshot.file() - && let Some(new_file) = new_snapshot.file() - { - if let Some(last_checked_file) = last_checked_file - && Arc::ptr_eq(last_checked_file, old_file) - && Arc::ptr_eq(last_checked_file, new_file) - { - continue; - } - if !self.can_collect_file(old_file, cx) { - return false; - } - if !Arc::ptr_eq(old_file, new_file) && !self.can_collect_file(new_file, cx) - { - return false; - } - last_checked_file = Some(new_file); - } else { - return false; - } - } - } - } - true - } - - fn is_file_open_source(&self, file: &Arc, cx: &App) -> bool { - if !file.is_local() || file.is_private() { - return false; - } - self.license_detection_watchers - .get(&file.worktree_id(cx)) - .is_some_and(|watcher| watcher.is_project_open_source()) - } - - fn load_data_collection_choice() -> DataCollectionChoice { - let choice = KEY_VALUE_STORE - .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE) - .log_err() - .flatten(); - - match choice.as_deref() { - Some("true") => DataCollectionChoice::Enabled, - Some("false") => DataCollectionChoice::Disabled, - Some(_) => { - log::error!("unknown value in '{ZED_PREDICT_DATA_COLLECTION_CHOICE}'"); - DataCollectionChoice::NotAnswered - } - None => DataCollectionChoice::NotAnswered, - } - } - - fn toggle_data_collection_choice(&mut self, cx: &mut Context) { - self.data_collection_choice = self.data_collection_choice.toggle(); - let new_choice = self.data_collection_choice; - db::write_and_log(cx, move || { - KEY_VALUE_STORE.write_kvp( - ZED_PREDICT_DATA_COLLECTION_CHOICE.into(), - new_choice.is_enabled().to_string(), - ) - }); - } -} - -pub struct PerformPredictEditsParams { - pub client: Arc, - pub llm_token: LlmApiToken, - pub app_version: SemanticVersion, - pub body: PredictEditsBody, -} - -#[derive(Error, Debug)] -#[error( - "You must update to Zed version {minimum_version} or higher to continue using edit predictions." 
-)] -pub struct ZedUpdateRequiredError { - minimum_version: SemanticVersion, -} - -fn common_prefix, T2: Iterator>(a: T1, b: T2) -> usize { - a.zip(b) - .take_while(|(a, b)| a == b) - .map(|(a, _)| a.len_utf8()) - .sum() -} - -fn git_info_for_file( - project: &Entity, - project_path: &ProjectPath, - cx: &App, -) -> Option { - let git_store = project.read(cx).git_store().read(cx); - if let Some((repository, _repo_path)) = - git_store.repository_and_path_for_project_path(project_path, cx) - { - let repository = repository.read(cx); - let head_sha = repository - .head_commit - .as_ref() - .map(|head_commit| head_commit.sha.to_string()); - let remote_origin_url = repository.remote_origin_url.clone(); - let remote_upstream_url = repository.remote_upstream_url.clone(); - if head_sha.is_none() && remote_origin_url.is_none() && remote_upstream_url.is_none() { - return None; - } - Some(PredictEditsGitInfo { - head_sha, - remote_origin_url, - remote_upstream_url, - }) - } else { - None - } -} - -pub struct GatherContextOutput { - pub body: PredictEditsBody, - pub editable_range: Range, - pub included_events_count: usize, -} - -pub fn gather_context( - full_path_str: String, - snapshot: &BufferSnapshot, - cursor_point: language::Point, - prompt_for_events: impl FnOnce() -> (String, usize) + Send + 'static, - cx: &App, -) -> Task> { - cx.background_spawn({ - let snapshot = snapshot.clone(); - async move { - let input_excerpt = excerpt_for_cursor_position( - cursor_point, - &full_path_str, - &snapshot, - MAX_REWRITE_TOKENS, - MAX_CONTEXT_TOKENS, - ); - let (input_events, included_events_count) = prompt_for_events(); - let editable_range = input_excerpt.editable_range.to_offset(&snapshot); - - let body = PredictEditsBody { - input_events, - input_excerpt: input_excerpt.prompt, - can_collect_data: false, - diagnostic_groups: None, - git_info: None, - outline: None, - speculated_output: None, - }; - - Ok(GatherContextOutput { - body, - editable_range, - included_events_count, - }) - } - }) -} - -fn prompt_for_events_impl(events: &[Event], mut remaining_tokens: usize) -> (String, usize) { - let mut result = String::new(); - for (ix, event) in events.iter().rev().enumerate() { - let event_string = event.to_prompt(); - let event_tokens = guess_token_count(event_string.len()); - if event_tokens > remaining_tokens { - return (result, ix); - } - - if !result.is_empty() { - result.insert_str(0, "\n\n"); - } - result.insert_str(0, &event_string); - remaining_tokens -= event_tokens; - } - return (result, events.len()); -} - -struct RegisteredBuffer { - snapshot: BufferSnapshot, - _subscriptions: [gpui::Subscription; 2], -} - -#[derive(Clone)] -pub enum Event { - BufferChange { - old_snapshot: BufferSnapshot, - new_snapshot: BufferSnapshot, - timestamp: Instant, - }, -} - -impl Event { - fn to_prompt(&self) -> String { - match self { - Event::BufferChange { - old_snapshot, - new_snapshot, - .. 
- } => { - let mut prompt = String::new(); - - let old_path = old_snapshot - .file() - .map(|f| f.path().as_ref()) - .unwrap_or(RelPath::unix("untitled").unwrap()); - let new_path = new_snapshot - .file() - .map(|f| f.path().as_ref()) - .unwrap_or(RelPath::unix("untitled").unwrap()); - if old_path != new_path { - writeln!(prompt, "User renamed {:?} to {:?}\n", old_path, new_path).unwrap(); - } - - let diff = language::unified_diff(&old_snapshot.text(), &new_snapshot.text()); - if !diff.is_empty() { - write!( - prompt, - "User edited {:?}:\n```diff\n{}\n```", - new_path, diff - ) - .unwrap(); - } - - prompt - } - } - } -} - -#[derive(Debug, Clone)] -struct CurrentEditPrediction { - buffer_id: EntityId, - completion: EditPrediction, -} - -impl CurrentEditPrediction { - fn should_replace_completion(&self, old_completion: &Self, snapshot: &BufferSnapshot) -> bool { - if self.buffer_id != old_completion.buffer_id { - return true; - } - - let Some(old_edits) = old_completion.completion.interpolate(snapshot) else { - return true; - }; - let Some(new_edits) = self.completion.interpolate(snapshot) else { - return false; - }; - - if old_edits.len() == 1 && new_edits.len() == 1 { - let (old_range, old_text) = &old_edits[0]; - let (new_range, new_text) = &new_edits[0]; - new_range == old_range && new_text.starts_with(old_text.as_ref()) - } else { - true - } - } -} - -struct PendingCompletion { - id: usize, - _task: Task<()>, -} - -#[derive(Debug, Clone, Copy)] -pub enum DataCollectionChoice { - NotAnswered, - Enabled, - Disabled, -} - -impl DataCollectionChoice { - pub fn is_enabled(self) -> bool { - match self { - Self::Enabled => true, - Self::NotAnswered | Self::Disabled => false, - } - } - - pub fn is_answered(self) -> bool { - match self { - Self::Enabled | Self::Disabled => true, - Self::NotAnswered => false, - } - } - - #[must_use] - pub fn toggle(&self) -> DataCollectionChoice { - match self { - Self::Enabled => Self::Disabled, - Self::Disabled => Self::Enabled, - Self::NotAnswered => Self::Enabled, - } - } -} - -impl From for DataCollectionChoice { - fn from(value: bool) -> Self { - match value { - true => DataCollectionChoice::Enabled, - false => DataCollectionChoice::Disabled, - } - } -} - -async fn llm_token_retry( - llm_token: &LlmApiToken, - client: &Arc, - build_request: impl Fn(String) -> Result>, -) -> Result> { - let mut did_retry = false; - let http_client = client.http_client(); - let mut token = llm_token.acquire(client).await?; - loop { - let request = build_request(token.clone())?; - let response = http_client.send(request).await?; - - if !did_retry - && !response.status().is_success() - && response - .headers() - .get(EXPIRED_LLM_TOKEN_HEADER_NAME) - .is_some() - { - did_retry = true; - token = llm_token.refresh(client).await?; - continue; - } - - return Ok(response); - } -} - -pub struct ZetaEditPredictionProvider { - zeta: Entity, - singleton_buffer: Option>, - pending_completions: ArrayVec, - next_pending_completion_id: usize, - current_completion: Option, - last_request_timestamp: Instant, - project: Entity, -} - -impl ZetaEditPredictionProvider { - pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300); - - pub fn new( - zeta: Entity, - project: Entity, - singleton_buffer: Option>, - ) -> Self { - Self { - zeta, - singleton_buffer, - pending_completions: ArrayVec::new(), - next_pending_completion_id: 0, - current_completion: None, - last_request_timestamp: Instant::now(), - project, - } - } -} - -impl edit_prediction::EditPredictionProvider for 
ZetaEditPredictionProvider { - fn name() -> &'static str { - "zed-predict" - } - - fn display_name() -> &'static str { - "Zed's Edit Predictions" - } - - fn show_completions_in_menu() -> bool { - true - } - - fn show_tab_accept_marker() -> bool { - true - } - - fn data_collection_state(&self, cx: &App) -> DataCollectionState { - if let Some(buffer) = &self.singleton_buffer - && let Some(file) = buffer.read(cx).file() - { - let is_project_open_source = self.zeta.read(cx).is_file_open_source(file, cx); - if self.zeta.read(cx).data_collection_choice.is_enabled() { - DataCollectionState::Enabled { - is_project_open_source, - } - } else { - DataCollectionState::Disabled { - is_project_open_source, - } - } - } else { - return DataCollectionState::Disabled { - is_project_open_source: false, - }; - } - } - - fn toggle_data_collection(&mut self, cx: &mut App) { - self.zeta - .update(cx, |zeta, cx| zeta.toggle_data_collection_choice(cx)); - } - - fn usage(&self, cx: &App) -> Option { - self.zeta.read(cx).usage(cx) - } - - fn is_enabled( - &self, - _buffer: &Entity, - _cursor_position: language::Anchor, - _cx: &App, - ) -> bool { - true - } - fn is_refreshing(&self) -> bool { - !self.pending_completions.is_empty() - } - - fn refresh( - &mut self, - buffer: Entity, - position: language::Anchor, - _debounce: bool, - cx: &mut Context, - ) { - if self.zeta.read(cx).update_required { - return; - } - - if self - .zeta - .read(cx) - .user_store - .read_with(cx, |user_store, _cx| { - user_store.account_too_young() || user_store.has_overdue_invoices() - }) - { - return; - } - - if let Some(current_completion) = self.current_completion.as_ref() { - let snapshot = buffer.read(cx).snapshot(); - if current_completion - .completion - .interpolate(&snapshot) - .is_some() - { - return; - } - } - - let pending_completion_id = self.next_pending_completion_id; - self.next_pending_completion_id += 1; - let last_request_timestamp = self.last_request_timestamp; - - let project = self.project.clone(); - let task = cx.spawn(async move |this, cx| { - if let Some(timeout) = (last_request_timestamp + Self::THROTTLE_TIMEOUT) - .checked_duration_since(Instant::now()) - { - cx.background_executor().timer(timeout).await; - } - - let completion_request = this.update(cx, |this, cx| { - this.last_request_timestamp = Instant::now(); - this.zeta.update(cx, |zeta, cx| { - zeta.request_completion(&project, &buffer, position, cx) - }) - }); - - let completion = match completion_request { - Ok(completion_request) => { - let completion_request = completion_request.await; - completion_request.map(|c| { - c.map(|completion| CurrentEditPrediction { - buffer_id: buffer.entity_id(), - completion, - }) - }) - } - Err(error) => Err(error), - }; - let Some(new_completion) = completion - .context("edit prediction failed") - .log_err() - .flatten() - else { - this.update(cx, |this, cx| { - if this.pending_completions[0].id == pending_completion_id { - this.pending_completions.remove(0); - } else { - this.pending_completions.clear(); - } - - cx.notify(); - }) - .ok(); - return; - }; - - this.update(cx, |this, cx| { - if this.pending_completions[0].id == pending_completion_id { - this.pending_completions.remove(0); - } else { - this.pending_completions.clear(); - } - - if let Some(old_completion) = this.current_completion.as_ref() { - let snapshot = buffer.read(cx).snapshot(); - if new_completion.should_replace_completion(old_completion, &snapshot) { - this.zeta.update(cx, |zeta, cx| { - zeta.completion_shown(&new_completion.completion, cx); - }); - 
this.current_completion = Some(new_completion); - } - } else { - this.zeta.update(cx, |zeta, cx| { - zeta.completion_shown(&new_completion.completion, cx); - }); - this.current_completion = Some(new_completion); - } - - cx.notify(); - }) - .ok(); - }); - - // We always maintain at most two pending completions. When we already - // have two, we replace the newest one. - if self.pending_completions.len() <= 1 { - self.pending_completions.push(PendingCompletion { - id: pending_completion_id, - _task: task, - }); - } else if self.pending_completions.len() == 2 { - self.pending_completions.pop(); - self.pending_completions.push(PendingCompletion { - id: pending_completion_id, - _task: task, - }); - } - } - - fn cycle( - &mut self, - _buffer: Entity, - _cursor_position: language::Anchor, - _direction: edit_prediction::Direction, - _cx: &mut Context, - ) { - // Right now we don't support cycling. - } - - fn accept(&mut self, cx: &mut Context) { - let completion_id = self - .current_completion - .as_ref() - .map(|completion| completion.completion.id); - if let Some(completion_id) = completion_id { - self.zeta - .update(cx, |zeta, cx| { - zeta.accept_edit_prediction(completion_id, cx) - }) - .detach(); - } - self.pending_completions.clear(); - } - - fn discard(&mut self, _cx: &mut Context) { - self.pending_completions.clear(); - self.current_completion.take(); - } - - fn suggest( - &mut self, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &mut Context, - ) -> Option { - let CurrentEditPrediction { - buffer_id, - completion, - .. - } = self.current_completion.as_mut()?; - - // Invalidate previous completion if it was generated for a different buffer. - if *buffer_id != buffer.entity_id() { - self.current_completion.take(); - return None; - } - - let buffer = buffer.read(cx); - let Some(edits) = completion.interpolate(&buffer.snapshot()) else { - self.current_completion.take(); - return None; - }; - - let cursor_row = cursor_position.to_point(buffer).row; - let (closest_edit_ix, (closest_edit_range, _)) = - edits.iter().enumerate().min_by_key(|(_, (range, _))| { - let distance_from_start = cursor_row.abs_diff(range.start.to_point(buffer).row); - let distance_from_end = cursor_row.abs_diff(range.end.to_point(buffer).row); - cmp::min(distance_from_start, distance_from_end) - })?; - - let mut edit_start_ix = closest_edit_ix; - for (range, _) in edits[..edit_start_ix].iter().rev() { - let distance_from_closest_edit = - closest_edit_range.start.to_point(buffer).row - range.end.to_point(buffer).row; - if distance_from_closest_edit <= 1 { - edit_start_ix -= 1; - } else { - break; - } - } - - let mut edit_end_ix = closest_edit_ix + 1; - for (range, _) in &edits[edit_end_ix..] { - let distance_from_closest_edit = - range.start.to_point(buffer).row - closest_edit_range.end.to_point(buffer).row; - if distance_from_closest_edit <= 1 { - edit_end_ix += 1; - } else { - break; - } - } - - Some(edit_prediction::EditPrediction::Local { - id: Some(completion.id.to_string().into()), - edits: edits[edit_start_ix..edit_end_ix].to_vec(), - edit_preview: Some(completion.edit_preview.clone()), - }) - } -} - -/// Typical number of string bytes per token for the purposes of limiting model input. This is -/// intentionally low to err on the side of underestimating limits. 
-const BYTES_PER_TOKEN_GUESS: usize = 3; - -fn guess_token_count(bytes: usize) -> usize { - bytes / BYTES_PER_TOKEN_GUESS -} - -#[cfg(test)] -mod tests { - use client::test::FakeServer; - use clock::{FakeSystemClock, ReplicaId}; - use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; - use gpui::TestAppContext; - use http_client::FakeHttpClient; - use indoc::indoc; - use language::Point; - use parking_lot::Mutex; - use serde_json::json; - use settings::SettingsStore; - use util::{path, rel_path::rel_path}; - - use super::*; - - const BSD_0_TXT: &str = include_str!("../license_examples/0bsd.txt"); - - #[gpui::test] - async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx)); - let edits: Arc<[(Range, Arc)]> = cx.update(|cx| { - to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into() - }); - - let edit_preview = cx - .read(|cx| buffer.read(cx).preview_edits(edits.clone(), cx)) - .await; - - let completion = EditPrediction { - edits, - edit_preview, - path: Path::new("").into(), - snapshot: cx.read(|cx| buffer.read(cx).snapshot()), - id: EditPredictionId(Uuid::new_v4()), - excerpt_range: 0..0, - cursor_offset: 0, - input_outline: "".into(), - input_events: "".into(), - input_excerpt: "".into(), - output_excerpt: "".into(), - buffer_snapshotted_at: Instant::now(), - response_received_at: Instant::now(), - }; - - cx.update(|cx| { - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(2..5, "REM".into()), (9..11, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(2..2, "REM".into()), (6..8, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.undo(cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(2..5, "REM".into()), (9..11, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(3..3, "EM".into()), (7..9, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(4..4, "M".into()), (8..10, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(9..11, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(4..4, "M".into()), (8..10, "".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx)); - assert_eq!( - from_completion_edits( - &completion.interpolate(&buffer.read(cx).snapshot()).unwrap(), - &buffer, - cx - ), - vec![(4..4, "M".into())] - ); - - buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx)); - assert_eq!(completion.interpolate(&buffer.read(cx).snapshot()), None); - }) - } - - #[gpui::test] - async fn test_clean_up_diff(cx: &mut 
TestAppContext) { - init_test(cx); - - assert_eq!( - apply_edit_prediction( - indoc! {" - fn main() { - let word_1 = \"lorem\"; - let range = word.len()..word.len(); - } - "}, - indoc! {" - <|editable_region_start|> - fn main() { - let word_1 = \"lorem\"; - let range = word_1.len()..word_1.len(); - } - - <|editable_region_end|> - "}, - cx, - ) - .await, - indoc! {" - fn main() { - let word_1 = \"lorem\"; - let range = word_1.len()..word_1.len(); - } - "}, - ); - - assert_eq!( - apply_edit_prediction( - indoc! {" - fn main() { - let story = \"the quick\" - } - "}, - indoc! {" - <|editable_region_start|> - fn main() { - let story = \"the quick brown fox jumps over the lazy dog\"; - } - - <|editable_region_end|> - "}, - cx, - ) - .await, - indoc! {" - fn main() { - let story = \"the quick brown fox jumps over the lazy dog\"; - } - "}, - ); - } - - #[gpui::test] - async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) { - init_test(cx); - - let buffer_content = "lorem\n"; - let completion_response = indoc! {" - ```animals.js - <|start_of_file|> - <|editable_region_start|> - lorem - ipsum - <|editable_region_end|> - ```"}; - - assert_eq!( - apply_edit_prediction(buffer_content, completion_response, cx).await, - "lorem\nipsum" - ); - } - - #[gpui::test] - async fn test_can_collect_data(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree(path!("/project"), json!({ "LICENSE": BSD_0_TXT })) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/project/src/main.rs"), cx) - }) - .await - .unwrap(); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - true - ); - - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Disabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_no_data_collection_for_remote_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [], cx).await; - - let buffer = cx.new(|_cx| { - Buffer::remote( - language::BufferId::new(1).unwrap(), - ReplicaId::new(1), - language::Capability::ReadWrite, - "fn main() {\n println!(\"Hello\");\n}", - ) - }); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_no_data_collection_for_private_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "LICENSE": BSD_0_TXT, - ".env": "SECRET_KEY=secret" - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer("/project/.env", cx) - }) - .await - .unwrap(); - - let (zeta, 
captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_no_data_collection_for_untitled_buffer(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [], cx).await; - let buffer = cx.new(|cx| Buffer::local("", cx)); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_no_data_collection_when_closed_source(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree(path!("/project"), json!({ "main.rs": "fn main() {}" })) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer("/project/main.rs", cx) - }) - .await - .unwrap(); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_data_collection_status_changes_on_move(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/open_source_worktree"), - json!({ "LICENSE": BSD_0_TXT, "main.rs": "" }), - ) - .await; - fs.insert_tree(path!("/closed_source_worktree"), json!({ "main.rs": "" })) - .await; - - let project = Project::test( - fs.clone(), - [ - path!("/open_source_worktree").as_ref(), - path!("/closed_source_worktree").as_ref(), - ], - cx, - ) - .await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/open_source_worktree/main.rs"), cx) - }) - .await - .unwrap(); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - true - ); - - let closed_source_file = project - .update(cx, |project, cx| { - let worktree2 = project - .worktree_for_root_name("closed_source_worktree", cx) - .unwrap(); - worktree2.update(cx, |worktree2, cx| { - worktree2.load_file(rel_path("main.rs"), cx) - }) - }) - .await - .unwrap() - .file; - - buffer.update(cx, |buffer, cx| { - buffer.file_updated(closed_source_file, cx); - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - } - - #[gpui::test] - async fn test_no_data_collection_for_events_in_uncollectable_buffers(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/worktree1"), - json!({ "LICENSE": BSD_0_TXT, "main.rs": "", "other.rs": "" }), - ) - .await; - 
fs.insert_tree(path!("/worktree2"), json!({ "private.rs": "" })) - .await; - - let project = Project::test( - fs.clone(), - [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], - cx, - ) - .await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/worktree1/main.rs"), cx) - }) - .await - .unwrap(); - let private_buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/worktree2/file.rs"), cx) - }) - .await - .unwrap(); - - let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; - zeta.update(cx, |zeta, _cx| { - zeta.data_collection_choice = DataCollectionChoice::Enabled - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - true - ); - - // this has a side effect of registering the buffer to watch for edits - run_edit_prediction(&private_buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - - private_buffer.update(cx, |private_buffer, cx| { - private_buffer.edit([(0..0, "An edit for the history!")], None, cx); - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - false - ); - - // make an edit that uses too many bytes, causing private_buffer edit to not be able to be - // included - buffer.update(cx, |buffer, cx| { - buffer.edit( - [(0..0, " ".repeat(MAX_EVENT_TOKENS * BYTES_PER_TOKEN_GUESS))], - None, - cx, - ); - }); - - run_edit_prediction(&buffer, &project, &zeta, cx).await; - assert_eq!( - captured_request.lock().clone().unwrap().can_collect_data, - true - ); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); - } - - async fn apply_edit_prediction( - buffer_content: &str, - completion_response: &str, - cx: &mut TestAppContext, - ) -> String { - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); - let (zeta, _, response) = make_test_zeta(&project, cx).await; - *response.lock() = completion_response.to_string(); - let edit_prediction = run_edit_prediction(&buffer, &project, &zeta, cx).await; - buffer.update(cx, |buffer, cx| { - buffer.edit(edit_prediction.edits.iter().cloned(), None, cx) - }); - buffer.read_with(cx, |buffer, _| buffer.text()) - } - - async fn run_edit_prediction( - buffer: &Entity, - project: &Entity, - zeta: &Entity, - cx: &mut TestAppContext, - ) -> EditPrediction { - let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); - zeta.update(cx, |zeta, cx| zeta.register_buffer(buffer, &project, cx)); - cx.background_executor.run_until_parked(); - let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(&project, buffer, cursor, cx) - }); - completion_task.await.unwrap().unwrap() - } - - async fn make_test_zeta( - project: &Entity, - cx: &mut TestAppContext, - ) -> ( - Entity, - Arc>>, - Arc>, - ) { - let default_response = indoc! 
{" - ```main.rs - <|start_of_file|> - <|editable_region_start|> - hello world - <|editable_region_end|> - ```" - }; - let captured_request: Arc>> = Arc::new(Mutex::new(None)); - let completion_response: Arc> = - Arc::new(Mutex::new(default_response.to_string())); - let http_client = FakeHttpClient::create({ - let captured_request = captured_request.clone(); - let completion_response = completion_response.clone(); - move |req| { - let captured_request = captured_request.clone(); - let completion_response = completion_response.clone(); - async move { - match (req.method(), req.uri().path()) { - (&Method::POST, "/client/llm_tokens") => { - Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&CreateLlmTokenResponse { - token: LlmToken("the-llm-token".to_string()), - }) - .unwrap() - .into(), - ) - .unwrap()) - } - (&Method::POST, "/predict_edits/v2") => { - let mut request_body = String::new(); - req.into_body().read_to_string(&mut request_body).await?; - *captured_request.lock() = - Some(serde_json::from_str(&request_body).unwrap()); - Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&PredictEditsResponse { - request_id: Uuid::new_v4().to_string(), - output_excerpt: completion_response.lock().clone(), - }) - .unwrap() - .into(), - ) - .unwrap()) - } - _ => Ok(http_client::Response::builder() - .status(404) - .body("Not Found".into()) - .unwrap()), - } - } - } - }); - - let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); - cx.update(|cx| { - RefreshLlmTokenListener::register(client.clone(), cx); - }); - let _server = FakeServer::for_client(42, &client, cx).await; - - let zeta = cx.new(|cx| { - let mut zeta = Zeta::new(client, project.read(cx).user_store(), cx); - - let worktrees = project.read(cx).worktrees(cx).collect::>(); - for worktree in worktrees { - let worktree_id = worktree.read(cx).id(); - zeta.license_detection_watchers - .entry(worktree_id) - .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx))); - } - - zeta - }); - - (zeta, captured_request, completion_response) - } - - fn to_completion_edits( - iterator: impl IntoIterator, Arc)>, - buffer: &Entity, - cx: &App, - ) -> Vec<(Range, Arc)> { - let buffer = buffer.read(cx); - iterator - .into_iter() - .map(|(range, text)| { - ( - buffer.anchor_after(range.start)..buffer.anchor_before(range.end), - text, - ) - }) - .collect() - } - - fn from_completion_edits( - editor_edits: &[(Range, Arc)], - buffer: &Entity, - cx: &App, - ) -> Vec<(Range, Arc)> { - let buffer = buffer.read(cx); - editor_edits - .iter() - .map(|(range, text)| { - ( - range.start.to_offset(buffer)..range.end.to_offset(buffer), - text.clone(), - ) - }) - .collect() - } - - #[ctor::ctor] - fn init_logger() { - zlog::init_test(); - } -} diff --git a/crates/zeta2/Cargo.toml b/crates/zeta2/Cargo.toml deleted file mode 100644 index 1cb3a866065748f8e39dee7a980b99ea0b6c63fa..0000000000000000000000000000000000000000 --- a/crates/zeta2/Cargo.toml +++ /dev/null @@ -1,57 +0,0 @@ -[package] -name = "zeta2" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/zeta2.rs" - -[features] -llm-response-cache = [] - -[dependencies] -anyhow.workspace = true -arrayvec.workspace = true -chrono.workspace = true -client.workspace = true -cloud_llm_client.workspace = true -cloud_zeta2_prompt.workspace = true -collections.workspace = true -edit_prediction.workspace = true 
-edit_prediction_context.workspace = true -feature_flags.workspace = true -futures.workspace = true -gpui.workspace = true -indoc.workspace = true -language.workspace = true -language_model.workspace = true -log.workspace = true -open_ai.workspace = true -project.workspace = true -release_channel.workspace = true -serde.workspace = true -serde_json.workspace = true -smol.workspace = true -thiserror.workspace = true -util.workspace = true -uuid.workspace = true -workspace.workspace = true -worktree.workspace = true - -[dev-dependencies] -clock = { workspace = true, features = ["test-support"] } -cloud_llm_client = { workspace = true, features = ["test-support"] } -gpui = { workspace = true, features = ["test-support"] } -lsp.workspace = true -indoc.workspace = true -language = { workspace = true, features = ["test-support"] } -language_model = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/zeta2/src/merge_excerpts.rs b/crates/zeta2/src/merge_excerpts.rs deleted file mode 100644 index 846d8034a8c2e88b8552dc8c9d48af6ccdc5efcf..0000000000000000000000000000000000000000 --- a/crates/zeta2/src/merge_excerpts.rs +++ /dev/null @@ -1,184 +0,0 @@ -use cloud_llm_client::predict_edits_v3::Excerpt; -use edit_prediction_context::Line; -use language::{BufferSnapshot, Point}; -use std::ops::Range; - -pub fn merge_excerpts( - buffer: &BufferSnapshot, - sorted_line_ranges: impl IntoIterator>, -) -> Vec { - let mut output = Vec::new(); - let mut merged_ranges = Vec::>::new(); - - for line_range in sorted_line_ranges { - if let Some(last_line_range) = merged_ranges.last_mut() - && line_range.start <= last_line_range.end - { - last_line_range.end = last_line_range.end.max(line_range.end); - continue; - } - merged_ranges.push(line_range); - } - - let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None); - let mut outline_items = outline_items.into_iter().peekable(); - - for range in merged_ranges { - let point_range = Point::new(range.start.0, 0)..Point::new(range.end.0, 0); - - while let Some(outline_item) = outline_items.peek() { - if outline_item.range.start >= point_range.start { - break; - } - if outline_item.range.end > point_range.start { - let mut point_range = outline_item.source_range_for_text.clone(); - point_range.start.column = 0; - point_range.end.column = buffer.line_len(point_range.end.row); - - output.push(Excerpt { - start_line: Line(point_range.start.row), - text: buffer - .text_for_range(point_range.clone()) - .collect::() - .into(), - }) - } - outline_items.next(); - } - - output.push(Excerpt { - start_line: Line(point_range.start.row), - text: buffer - .text_for_range(point_range.clone()) - .collect::() - .into(), - }) - } - - output -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use super::*; - use cloud_llm_client::predict_edits_v3; - use gpui::{TestAppContext, prelude::*}; - use indoc::indoc; - use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt}; - use pretty_assertions::assert_eq; - use util::test::marked_text_ranges; - - #[gpui::test] - fn test_rust(cx: &mut TestAppContext) { - let table = [ - ( - indoc! 
{r#" - struct User { - first_name: String, - « last_name: String, - ageˇ: u32, - » email: String, - create_at: Instant, - } - - impl User { - pub fn first_name(&self) -> String { - self.first_name.clone() - } - - pub fn full_name(&self) -> String { - « format!("{} {}", self.first_name, self.last_name) - » } - } - "#}, - indoc! {r#" - 1|struct User { - … - 3| last_name: String, - 4| age<|cursor|>: u32, - … - 9|impl User { - … - 14| pub fn full_name(&self) -> String { - 15| format!("{} {}", self.first_name, self.last_name) - … - "#}, - ), - ( - indoc! {r#" - struct User { - first_name: String, - « last_name: String, - age: u32, - } - »"# - }, - indoc! {r#" - 1|struct User { - … - 3| last_name: String, - 4| age: u32, - 5|} - "#}, - ), - ]; - - for (input, expected_output) in table { - let input_without_ranges = input.replace(['«', '»'], ""); - let input_without_caret = input.replace('ˇ', ""); - let cursor_offset = input_without_ranges.find('ˇ'); - let (input, ranges) = marked_text_ranges(&input_without_caret, false); - let buffer = - cx.new(|cx| Buffer::local(input, cx).with_language(Arc::new(rust_lang()), cx)); - buffer.read_with(cx, |buffer, _cx| { - let insertions = cursor_offset - .map(|offset| { - let point = buffer.offset_to_point(offset); - vec![( - predict_edits_v3::Point { - line: Line(point.row), - column: point.column, - }, - "<|cursor|>", - )] - }) - .unwrap_or_default(); - let ranges: Vec> = ranges - .into_iter() - .map(|range| { - let point_range = range.to_point(&buffer); - Line(point_range.start.row)..Line(point_range.end.row) - }) - .collect(); - - let mut output = String::new(); - cloud_zeta2_prompt::write_excerpts( - merge_excerpts(&buffer.snapshot(), ranges).iter(), - &insertions, - Line(buffer.max_point().row), - true, - &mut output, - ); - assert_eq!(output, expected_output); - }); - } - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(language::tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } -} diff --git a/crates/zeta2/src/provider.rs b/crates/zeta2/src/provider.rs deleted file mode 100644 index a19e7f9a1da5e1808c48e3ce0469d8b390698760..0000000000000000000000000000000000000000 --- a/crates/zeta2/src/provider.rs +++ /dev/null @@ -1,267 +0,0 @@ -use std::{ - cmp, - sync::Arc, - time::{Duration, Instant}, -}; - -use arrayvec::ArrayVec; -use client::{Client, UserStore}; -use edit_prediction::{DataCollectionState, Direction, EditPredictionProvider}; -use gpui::{App, Entity, Task, prelude::*}; -use language::ToPoint as _; -use project::Project; -use util::ResultExt as _; - -use crate::{BufferEditPrediction, Zeta}; - -pub struct ZetaEditPredictionProvider { - zeta: Entity, - next_pending_prediction_id: usize, - pending_predictions: ArrayVec, - last_request_timestamp: Instant, - project: Entity, -} - -impl ZetaEditPredictionProvider { - pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300); - - pub fn new( - project: Entity, - client: &Arc, - user_store: &Entity, - cx: &mut App, - ) -> Self { - let zeta = Zeta::global(client, user_store, cx); - zeta.update(cx, |zeta, cx| { - zeta.register_project(&project, cx); - }); - - Self { - zeta, - next_pending_prediction_id: 0, - pending_predictions: ArrayVec::new(), - last_request_timestamp: Instant::now(), - project: project, - } - } -} - -struct PendingPrediction { 
- id: usize, - _task: Task<()>, -} - -impl EditPredictionProvider for ZetaEditPredictionProvider { - fn name() -> &'static str { - "zed-predict2" - } - - fn display_name() -> &'static str { - "Zed's Edit Predictions 2" - } - - fn show_completions_in_menu() -> bool { - true - } - - fn show_tab_accept_marker() -> bool { - true - } - - fn data_collection_state(&self, _cx: &App) -> DataCollectionState { - // TODO [zeta2] - DataCollectionState::Unsupported - } - - fn toggle_data_collection(&mut self, _cx: &mut App) { - // TODO [zeta2] - } - - fn usage(&self, cx: &App) -> Option { - self.zeta.read(cx).usage(cx) - } - - fn is_enabled( - &self, - _buffer: &Entity, - _cursor_position: language::Anchor, - _cx: &App, - ) -> bool { - true - } - - fn is_refreshing(&self) -> bool { - !self.pending_predictions.is_empty() - } - - fn refresh( - &mut self, - buffer: Entity, - cursor_position: language::Anchor, - _debounce: bool, - cx: &mut Context, - ) { - let zeta = self.zeta.read(cx); - - if zeta.user_store.read_with(cx, |user_store, _cx| { - user_store.account_too_young() || user_store.has_overdue_invoices() - }) { - return; - } - - if let Some(current) = zeta.current_prediction_for_buffer(&buffer, &self.project, cx) - && let BufferEditPrediction::Local { prediction } = current - && prediction.interpolate(buffer.read(cx)).is_some() - { - return; - } - - self.zeta.update(cx, |zeta, cx| { - zeta.refresh_context_if_needed(&self.project, &buffer, cursor_position, cx); - }); - - let pending_prediction_id = self.next_pending_prediction_id; - self.next_pending_prediction_id += 1; - let last_request_timestamp = self.last_request_timestamp; - - let project = self.project.clone(); - let task = cx.spawn(async move |this, cx| { - if let Some(timeout) = (last_request_timestamp + Self::THROTTLE_TIMEOUT) - .checked_duration_since(Instant::now()) - { - cx.background_executor().timer(timeout).await; - } - - let refresh_task = this.update(cx, |this, cx| { - this.last_request_timestamp = Instant::now(); - this.zeta.update(cx, |zeta, cx| { - zeta.refresh_prediction(&project, &buffer, cursor_position, cx) - }) - }); - - if let Some(refresh_task) = refresh_task.ok() { - refresh_task.await.log_err(); - } - - this.update(cx, |this, cx| { - if this.pending_predictions[0].id == pending_prediction_id { - this.pending_predictions.remove(0); - } else { - this.pending_predictions.clear(); - } - - cx.notify(); - }) - .ok(); - }); - - // We always maintain at most two pending predictions. When we already - // have two, we replace the newest one. 
- if self.pending_predictions.len() <= 1 { - self.pending_predictions.push(PendingPrediction { - id: pending_prediction_id, - _task: task, - }); - } else if self.pending_predictions.len() == 2 { - self.pending_predictions.pop(); - self.pending_predictions.push(PendingPrediction { - id: pending_prediction_id, - _task: task, - }); - } - - cx.notify(); - } - - fn cycle( - &mut self, - _buffer: Entity, - _cursor_position: language::Anchor, - _direction: Direction, - _cx: &mut Context, - ) { - } - - fn accept(&mut self, cx: &mut Context) { - self.zeta.update(cx, |zeta, cx| { - zeta.accept_current_prediction(&self.project, cx); - }); - self.pending_predictions.clear(); - } - - fn discard(&mut self, cx: &mut Context) { - self.zeta.update(cx, |zeta, _cx| { - zeta.discard_current_prediction(&self.project); - }); - self.pending_predictions.clear(); - } - - fn suggest( - &mut self, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &mut Context, - ) -> Option { - let prediction = - self.zeta - .read(cx) - .current_prediction_for_buffer(buffer, &self.project, cx)?; - - let prediction = match prediction { - BufferEditPrediction::Local { prediction } => prediction, - BufferEditPrediction::Jump { prediction } => { - return Some(edit_prediction::EditPrediction::Jump { - id: Some(prediction.id.to_string().into()), - snapshot: prediction.snapshot.clone(), - target: prediction.edits.first().unwrap().0.start, - }); - } - }; - - let buffer = buffer.read(cx); - let snapshot = buffer.snapshot(); - - let Some(edits) = prediction.interpolate(&snapshot) else { - self.zeta.update(cx, |zeta, _cx| { - zeta.discard_current_prediction(&self.project); - }); - return None; - }; - - let cursor_row = cursor_position.to_point(&snapshot).row; - let (closest_edit_ix, (closest_edit_range, _)) = - edits.iter().enumerate().min_by_key(|(_, (range, _))| { - let distance_from_start = cursor_row.abs_diff(range.start.to_point(&snapshot).row); - let distance_from_end = cursor_row.abs_diff(range.end.to_point(&snapshot).row); - cmp::min(distance_from_start, distance_from_end) - })?; - - let mut edit_start_ix = closest_edit_ix; - for (range, _) in edits[..edit_start_ix].iter().rev() { - let distance_from_closest_edit = closest_edit_range.start.to_point(&snapshot).row - - range.end.to_point(&snapshot).row; - if distance_from_closest_edit <= 1 { - edit_start_ix -= 1; - } else { - break; - } - } - - let mut edit_end_ix = closest_edit_ix + 1; - for (range, _) in &edits[edit_end_ix..] 
{ - let distance_from_closest_edit = - range.start.to_point(buffer).row - closest_edit_range.end.to_point(&snapshot).row; - if distance_from_closest_edit <= 1 { - edit_end_ix += 1; - } else { - break; - } - } - - Some(edit_prediction::EditPrediction::Local { - id: Some(prediction.id.to_string().into()), - edits: edits[edit_start_ix..edit_end_ix].to_vec(), - edit_preview: Some(prediction.edit_preview.clone()), - }) - } -} diff --git a/crates/zeta2/src/retrieval_search.rs b/crates/zeta2/src/retrieval_search.rs deleted file mode 100644 index f735f44cad9623711e5ed9a1293a74e34e084888..0000000000000000000000000000000000000000 --- a/crates/zeta2/src/retrieval_search.rs +++ /dev/null @@ -1,549 +0,0 @@ -use std::ops::Range; - -use anyhow::Result; -use cloud_zeta2_prompt::retrieval_prompt::SearchToolQuery; -use collections::HashMap; -use futures::{ - StreamExt, - channel::mpsc::{self, UnboundedSender}, -}; -use gpui::{AppContext, AsyncApp, Entity}; -use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt, Point, ToOffset, ToPoint}; -use project::{ - Project, WorktreeSettings, - search::{SearchQuery, SearchResult}, -}; -use smol::channel; -use util::{ - ResultExt as _, - paths::{PathMatcher, PathStyle}, -}; -use workspace::item::Settings as _; - -pub async fn run_retrieval_searches( - project: Entity, - queries: Vec, - cx: &mut AsyncApp, -) -> Result, Vec>>> { - let (exclude_matcher, path_style) = project.update(cx, |project, cx| { - let global_settings = WorktreeSettings::get_global(cx); - let exclude_patterns = global_settings - .file_scan_exclusions - .sources() - .iter() - .chain(global_settings.private_files.sources().iter()); - let path_style = project.path_style(cx); - anyhow::Ok((PathMatcher::new(exclude_patterns, path_style)?, path_style)) - })??; - - let (results_tx, mut results_rx) = mpsc::unbounded(); - - for query in queries { - let exclude_matcher = exclude_matcher.clone(); - let results_tx = results_tx.clone(); - let project = project.clone(); - cx.spawn(async move |cx| { - run_query( - query, - results_tx.clone(), - path_style, - exclude_matcher, - &project, - cx, - ) - .await - .log_err(); - }) - .detach() - } - drop(results_tx); - - cx.background_spawn(async move { - let mut results: HashMap, Vec>> = HashMap::default(); - let mut snapshots = HashMap::default(); - - let mut total_bytes = 0; - 'outer: while let Some((buffer, snapshot, excerpts)) = results_rx.next().await { - snapshots.insert(buffer.entity_id(), snapshot); - let existing = results.entry(buffer).or_default(); - existing.reserve(excerpts.len()); - - for (range, size) in excerpts { - // Blunt trimming of the results until we have a proper algorithmic filtering step - if (total_bytes + size) > MAX_RESULTS_LEN { - log::trace!("Combined results reached limit of {MAX_RESULTS_LEN}B"); - break 'outer; - } - total_bytes += size; - existing.push(range); - } - } - - for (buffer, ranges) in results.iter_mut() { - if let Some(snapshot) = snapshots.get(&buffer.entity_id()) { - ranges.sort_unstable_by(|a, b| { - a.start - .cmp(&b.start, snapshot) - .then(b.end.cmp(&b.end, snapshot)) - }); - - let mut index = 1; - while index < ranges.len() { - if ranges[index - 1] - .end - .cmp(&ranges[index].start, snapshot) - .is_gt() - { - let removed = ranges.remove(index); - ranges[index - 1].end = removed.end; - } else { - index += 1; - } - } - } - } - - Ok(results) - }) - .await -} - -const MAX_EXCERPT_LEN: usize = 768; -const MAX_RESULTS_LEN: usize = MAX_EXCERPT_LEN * 5; - -struct SearchJob { - buffer: Entity, - snapshot: BufferSnapshot, 
- ranges: Vec>, - query_ix: usize, - jobs_tx: channel::Sender, -} - -async fn run_query( - input_query: SearchToolQuery, - results_tx: UnboundedSender<(Entity, BufferSnapshot, Vec<(Range, usize)>)>, - path_style: PathStyle, - exclude_matcher: PathMatcher, - project: &Entity, - cx: &mut AsyncApp, -) -> Result<()> { - let include_matcher = PathMatcher::new(vec![input_query.glob], path_style)?; - - let make_search = |regex: &str| -> Result { - SearchQuery::regex( - regex, - false, - true, - false, - true, - include_matcher.clone(), - exclude_matcher.clone(), - true, - None, - ) - }; - - if let Some(outer_syntax_regex) = input_query.syntax_node.first() { - let outer_syntax_query = make_search(outer_syntax_regex)?; - let nested_syntax_queries = input_query - .syntax_node - .into_iter() - .skip(1) - .map(|query| make_search(&query)) - .collect::>>()?; - let content_query = input_query - .content - .map(|regex| make_search(®ex)) - .transpose()?; - - let (jobs_tx, jobs_rx) = channel::unbounded(); - - let outer_search_results_rx = - project.update(cx, |project, cx| project.search(outer_syntax_query, cx))?; - - let outer_search_task = cx.spawn(async move |cx| { - futures::pin_mut!(outer_search_results_rx); - while let Some(SearchResult::Buffer { buffer, ranges }) = - outer_search_results_rx.next().await - { - buffer - .read_with(cx, |buffer, _| buffer.parsing_idle())? - .await; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - let expanded_ranges: Vec<_> = ranges - .into_iter() - .filter_map(|range| expand_to_parent_range(&range, &snapshot)) - .collect(); - jobs_tx - .send(SearchJob { - buffer, - snapshot, - ranges: expanded_ranges, - query_ix: 0, - jobs_tx: jobs_tx.clone(), - }) - .await?; - } - anyhow::Ok(()) - }); - - let n_workers = cx.background_executor().num_cpus(); - let search_job_task = cx.background_executor().scoped(|scope| { - for _ in 0..n_workers { - scope.spawn(async { - while let Ok(job) = jobs_rx.recv().await { - process_nested_search_job( - &results_tx, - &nested_syntax_queries, - &content_query, - job, - ) - .await; - } - }); - } - }); - - search_job_task.await; - outer_search_task.await?; - } else if let Some(content_regex) = &input_query.content { - let search_query = make_search(&content_regex)?; - - let results_rx = project.update(cx, |project, cx| project.search(search_query, cx))?; - futures::pin_mut!(results_rx); - - while let Some(SearchResult::Buffer { buffer, ranges }) = results_rx.next().await { - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - - let ranges = ranges - .into_iter() - .map(|range| { - let range = range.to_offset(&snapshot); - let range = expand_to_entire_lines(range, &snapshot); - let size = range.len(); - let range = - snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end); - (range, size) - }) - .collect(); - - let send_result = results_tx.unbounded_send((buffer.clone(), snapshot.clone(), ranges)); - - if let Err(err) = send_result - && !err.is_disconnected() - { - log::error!("{err}"); - } - } - } else { - log::warn!("Context gathering model produced a glob-only search"); - } - - anyhow::Ok(()) -} - -async fn process_nested_search_job( - results_tx: &UnboundedSender<(Entity, BufferSnapshot, Vec<(Range, usize)>)>, - queries: &Vec, - content_query: &Option, - job: SearchJob, -) { - if let Some(search_query) = queries.get(job.query_ix) { - let mut subranges = Vec::new(); - for range in job.ranges { - let start = range.start; - let search_results = search_query.search(&job.snapshot, 
Some(range)).await; - for subrange in search_results { - let subrange = start + subrange.start..start + subrange.end; - subranges.extend(expand_to_parent_range(&subrange, &job.snapshot)); - } - } - job.jobs_tx - .send(SearchJob { - buffer: job.buffer, - snapshot: job.snapshot, - ranges: subranges, - query_ix: job.query_ix + 1, - jobs_tx: job.jobs_tx.clone(), - }) - .await - .ok(); - } else { - let ranges = if let Some(content_query) = content_query { - let mut subranges = Vec::new(); - for range in job.ranges { - let start = range.start; - let search_results = content_query.search(&job.snapshot, Some(range)).await; - for subrange in search_results { - let subrange = start + subrange.start..start + subrange.end; - subranges.push(subrange); - } - } - subranges - } else { - job.ranges - }; - - let matches = ranges - .into_iter() - .map(|range| { - let snapshot = &job.snapshot; - let range = expand_to_entire_lines(range, snapshot); - let size = range.len(); - let range = snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end); - (range, size) - }) - .collect(); - - let send_result = results_tx.unbounded_send((job.buffer, job.snapshot, matches)); - - if let Err(err) = send_result - && !err.is_disconnected() - { - log::error!("{err}"); - } - } -} - -fn expand_to_entire_lines(range: Range, snapshot: &BufferSnapshot) -> Range { - let mut point_range = range.to_point(snapshot); - point_range.start.column = 0; - if point_range.end.column > 0 { - point_range.end = snapshot.max_point().min(point_range.end + Point::new(1, 0)); - } - point_range.to_offset(snapshot) -} - -fn expand_to_parent_range( - range: &Range, - snapshot: &BufferSnapshot, -) -> Option> { - let mut line_range = range.to_point(&snapshot); - line_range.start.column = snapshot.indent_size_for_line(line_range.start.row).len; - line_range.end.column = snapshot.line_len(line_range.end.row); - // TODO skip result if matched line isn't the first node line? 
- - let node = snapshot.syntax_ancestor(line_range)?; - Some(node.byte_range()) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::merge_excerpts::merge_excerpts; - use cloud_zeta2_prompt::write_codeblock; - use edit_prediction_context::Line; - use gpui::TestAppContext; - use indoc::indoc; - use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust}; - use pretty_assertions::assert_eq; - use project::FakeFs; - use serde_json::json; - use settings::SettingsStore; - use std::path::Path; - use util::path; - - #[gpui::test] - async fn test_retrieval(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "user.rs": indoc!{" - pub struct Organization { - owner: Arc, - } - - pub struct User { - first_name: String, - last_name: String, - } - - impl Organization { - pub fn owner(&self) -> Arc { - self.owner.clone() - } - } - - impl User { - pub fn new(first_name: String, last_name: String) -> Self { - Self { - first_name, - last_name - } - } - - pub fn first_name(&self) -> String { - self.first_name.clone() - } - - pub fn last_name(&self) -> String { - self.last_name.clone() - } - } - "}, - "main.rs": indoc!{r#" - fn main() { - let user = User::new(FIRST_NAME.clone(), "doe".into()); - println!("user {:?}", user); - } - "#}, - }), - ) - .await; - - let project = Project::test(fs, vec![Path::new(path!("/root"))], cx).await; - project.update(cx, |project, _cx| { - project.languages().add(rust_lang().into()) - }); - - assert_results( - &project, - SearchToolQuery { - glob: "user.rs".into(), - syntax_node: vec!["impl\\s+User".into(), "pub\\s+fn\\s+first_name".into()], - content: None, - }, - indoc! {r#" - `````root/user.rs - … - impl User { - … - pub fn first_name(&self) -> String { - self.first_name.clone() - } - … - ````` - "#}, - cx, - ) - .await; - - assert_results( - &project, - SearchToolQuery { - glob: "user.rs".into(), - syntax_node: vec!["impl\\s+User".into()], - content: Some("\\.clone".into()), - }, - indoc! {r#" - `````root/user.rs - … - impl User { - … - pub fn first_name(&self) -> String { - self.first_name.clone() - … - pub fn last_name(&self) -> String { - self.last_name.clone() - … - ````` - "#}, - cx, - ) - .await; - - assert_results( - &project, - SearchToolQuery { - glob: "*.rs".into(), - syntax_node: vec![], - content: Some("\\.clone".into()), - }, - indoc! 
{r#" - `````root/main.rs - fn main() { - let user = User::new(FIRST_NAME.clone(), "doe".into()); - … - ````` - - `````root/user.rs - … - impl Organization { - pub fn owner(&self) -> Arc { - self.owner.clone() - … - impl User { - … - pub fn first_name(&self) -> String { - self.first_name.clone() - … - pub fn last_name(&self) -> String { - self.last_name.clone() - … - ````` - "#}, - cx, - ) - .await; - } - - async fn assert_results( - project: &Entity, - query: SearchToolQuery, - expected_output: &str, - cx: &mut TestAppContext, - ) { - let results = run_retrieval_searches(project.clone(), vec![query], &mut cx.to_async()) - .await - .unwrap(); - - let mut results = results.into_iter().collect::>(); - results.sort_by_key(|results| { - results - .0 - .read_with(cx, |buffer, _| buffer.file().unwrap().path().clone()) - }); - - let mut output = String::new(); - for (buffer, ranges) in results { - buffer.read_with(cx, |buffer, cx| { - let excerpts = ranges.into_iter().map(|range| { - let point_range = range.to_point(buffer); - if point_range.end.column > 0 { - Line(point_range.start.row)..Line(point_range.end.row + 1) - } else { - Line(point_range.start.row)..Line(point_range.end.row) - } - }); - - write_codeblock( - &buffer.file().unwrap().full_path(cx), - merge_excerpts(&buffer.snapshot(), excerpts).iter(), - &[], - Line(buffer.max_point().row), - false, - &mut output, - ); - }); - } - output.pop(); - - assert_eq!(output, expected_output); - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(move |cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - zlog::init_test(); - }); - } -} diff --git a/crates/zeta2/src/xml_edits.rs b/crates/zeta2/src/xml_edits.rs deleted file mode 100644 index e8bcc4b1ba7eb2d00cd73b0b2e8d1638a5b00e32..0000000000000000000000000000000000000000 --- a/crates/zeta2/src/xml_edits.rs +++ /dev/null @@ -1,197 +0,0 @@ -use anyhow::{Context as _, Result, anyhow}; -use language::{Anchor, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot}; -use std::ops::Range; -use std::path::Path; -use std::sync::Arc; - -pub async fn parse_xml_edits<'a>( - mut input: &'a str, - get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range])> + Send, -) -> Result<(&'a BufferSnapshot, Vec<(Range, Arc)>)> { - let edits_tag = parse_tag(&mut input, "edits")?.context("No edits tag")?; - - input = edits_tag.body; - - let file_path = edits_tag - .attributes - .trim_start() - .strip_prefix("path") - .context("no file attribute on edits tag")? - .trim_end() - .strip_prefix('=') - .context("no value for path attribute")? - .trim() - .trim_start_matches('"') - .trim_end_matches('"'); - - let (buffer, context_ranges) = get_buffer(file_path.as_ref()) - .with_context(|| format!("no buffer for file {file_path}"))?; - - let mut edits = vec![]; - while let Some(old_text_tag) = parse_tag(&mut input, "old_text")? 
{ - let new_text_tag = - parse_tag(&mut input, "new_text")?.context("no new_text tag following old_text")?; - edits.extend(resolve_new_text_old_text_in_buffer( - new_text_tag.body, - old_text_tag.body, - buffer, - context_ranges, - )?); - } - - Ok((buffer, edits)) -} - -fn resolve_new_text_old_text_in_buffer( - new_text: &str, - old_text: &str, - buffer: &TextBufferSnapshot, - ranges: &[Range], -) -> Result, Arc)>, anyhow::Error> { - let context_offset = if old_text.is_empty() { - Ok(0) - } else { - let mut offset = None; - for range in ranges { - let range = range.to_offset(buffer); - let text = buffer.text_for_range(range.clone()).collect::(); - for (match_offset, _) in text.match_indices(old_text) { - if offset.is_some() { - anyhow::bail!("old_text is not unique enough:\n{}", old_text); - } - offset = Some(range.start + match_offset); - } - } - offset.ok_or_else(|| anyhow!("Failed to match old_text:\n{}", old_text)) - }?; - - let edits_within_hunk = language::text_diff(&old_text, &new_text); - Ok(edits_within_hunk - .into_iter() - .map(move |(inner_range, inner_text)| { - ( - buffer.anchor_after(context_offset + inner_range.start) - ..buffer.anchor_before(context_offset + inner_range.end), - inner_text, - ) - })) -} - -struct ParsedTag<'a> { - attributes: &'a str, - body: &'a str, -} - -fn parse_tag<'a>(input: &mut &'a str, tag: &str) -> Result>> { - let open_tag = format!("<{}", tag); - let close_tag = format!("", tag); - let Some(start_ix) = input.find(&open_tag) else { - return Ok(None); - }; - let start_ix = start_ix + open_tag.len(); - let closing_bracket_ix = start_ix - + input[start_ix..] - .find('>') - .with_context(|| format!("missing > after {tag}"))?; - let attributes = &input[start_ix..closing_bracket_ix].trim(); - let end_ix = closing_bracket_ix - + input[closing_bracket_ix..] - .find(&close_tag) - .with_context(|| format!("no `{close_tag}` tag"))?; - let body = &input[closing_bracket_ix + '>'.len_utf8()..end_ix]; - let body = body.strip_prefix('\n').unwrap_or(body); - *input = &input[end_ix + close_tag.len()..]; - Ok(Some(ParsedTag { attributes, body })) -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::TestAppContext; - use indoc::indoc; - use language::Point; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - - #[test] - fn test_parse_tags() { - let mut input = indoc! {r#" - Prelude - - tag value - - "# }; - let parsed = parse_tag(&mut input, "tag").unwrap().unwrap(); - assert_eq!(parsed.attributes, "attr=\"foo\""); - assert_eq!(parsed.body, "tag value\n"); - assert_eq!(input, "\n"); - } - - #[gpui::test] - async fn test_parse_xml_edits(cx: &mut TestAppContext) { - let fs = init_test(cx); - - let buffer_1_text = indoc! {r#" - one two three four - five six seven eight - nine ten eleven twelve - "# }; - - fs.insert_tree( - path!("/root"), - json!({ - "file1": buffer_1_text, - }), - ) - .await; - - let project = Project::test(fs, [path!("/root").as_ref()], cx).await; - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/root/file1"), cx) - }) - .await - .unwrap(); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let edits = indoc! {r#" - - - five six seven eight - - - five SIX seven eight! 
- - - "#}; - - let (buffer, edits) = parse_xml_edits(edits, |_path| { - Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_])) - }) - .await - .unwrap(); - - let edits = edits - .into_iter() - .map(|(range, text)| (range.to_point(&buffer), text)) - .collect::>(); - assert_eq!( - edits, - &[ - (Point::new(1, 5)..Point::new(1, 8), "SIX".into()), - (Point::new(1, 20)..Point::new(1, 20), "!".into()) - ] - ); - } - - fn init_test(cx: &mut TestAppContext) -> Arc { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - }); - - FakeFs::new(cx.background_executor.clone()) - } -} diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs deleted file mode 100644 index 6139c9c75e16f8805e6529dc1700eef1beacd713..0000000000000000000000000000000000000000 --- a/crates/zeta2/src/zeta2.rs +++ /dev/null @@ -1,2269 +0,0 @@ -use anyhow::{Context as _, Result, anyhow, bail}; -use chrono::TimeDelta; -use client::{Client, EditPredictionUsage, UserStore}; -use cloud_llm_client::predict_edits_v3::{self, PromptFormat, Signature}; -use cloud_llm_client::{ - AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, - ZED_VERSION_HEADER_NAME, -}; -use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery}; -use cloud_zeta2_prompt::{CURSOR_MARKER, DEFAULT_MAX_PROMPT_BYTES}; -use collections::HashMap; -use edit_prediction_context::{ - DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions, - EditPredictionExcerpt, EditPredictionExcerptOptions, EditPredictionScoreOptions, Line, - SyntaxIndex, SyntaxIndexState, -}; -use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; -use futures::AsyncReadExt as _; -use futures::channel::{mpsc, oneshot}; -use gpui::http_client::{AsyncBody, Method}; -use gpui::{ - App, Entity, EntityId, Global, SemanticVersion, SharedString, Subscription, Task, WeakEntity, - http_client, prelude::*, -}; -use language::{Anchor, Buffer, DiagnosticSet, LanguageServerId, ToOffset as _, ToPoint}; -use language::{BufferSnapshot, OffsetRangeExt}; -use language_model::{LlmApiToken, RefreshLlmTokenListener}; -use open_ai::FunctionDefinition; -use project::Project; -use release_channel::AppVersion; -use serde::de::DeserializeOwned; -use std::collections::{VecDeque, hash_map}; - -use std::env; -use std::ops::Range; -use std::path::Path; -use std::str::FromStr as _; -use std::sync::{Arc, LazyLock}; -use std::time::{Duration, Instant}; -use thiserror::Error; -use util::rel_path::RelPathBuf; -use util::{LogErrorFuture, TryFutureExt}; -use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; - -pub mod merge_excerpts; -mod prediction; -mod provider; -pub mod retrieval_search; -pub mod udiff; -mod xml_edits; - -use crate::merge_excerpts::merge_excerpts; -use crate::prediction::EditPrediction; -pub use crate::prediction::EditPredictionId; -pub use provider::ZetaEditPredictionProvider; - -/// Maximum number of events to track. 
-const MAX_EVENT_COUNT: usize = 16; - -pub const DEFAULT_EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPredictionExcerptOptions { - max_bytes: 512, - min_bytes: 128, - target_before_cursor_over_total_bytes: 0.5, -}; - -pub const DEFAULT_CONTEXT_OPTIONS: ContextMode = - ContextMode::Agentic(DEFAULT_AGENTIC_CONTEXT_OPTIONS); - -pub const DEFAULT_AGENTIC_CONTEXT_OPTIONS: AgenticContextOptions = AgenticContextOptions { - excerpt: DEFAULT_EXCERPT_OPTIONS, -}; - -pub const DEFAULT_SYNTAX_CONTEXT_OPTIONS: EditPredictionContextOptions = - EditPredictionContextOptions { - use_imports: true, - max_retrieved_declarations: 0, - excerpt: DEFAULT_EXCERPT_OPTIONS, - score: EditPredictionScoreOptions { - omit_excerpt_overlaps: true, - }, - }; - -pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions { - context: DEFAULT_CONTEXT_OPTIONS, - max_prompt_bytes: DEFAULT_MAX_PROMPT_BYTES, - max_diagnostic_bytes: 2048, - prompt_format: PromptFormat::DEFAULT, - file_indexing_parallelism: 1, - buffer_change_grouping_interval: Duration::from_secs(1), -}; - -static USE_OLLAMA: LazyLock = - LazyLock::new(|| env::var("ZED_ZETA2_OLLAMA").is_ok_and(|var| !var.is_empty())); -static MODEL_ID: LazyLock = LazyLock::new(|| { - env::var("ZED_ZETA2_MODEL").unwrap_or(if *USE_OLLAMA { - "qwen3-coder:30b".to_string() - } else { - "yqvev8r3".to_string() - }) -}); -static PREDICT_EDITS_URL: LazyLock> = LazyLock::new(|| { - env::var("ZED_PREDICT_EDITS_URL").ok().or_else(|| { - if *USE_OLLAMA { - Some("http://localhost:11434/v1/chat/completions".into()) - } else { - None - } - }) -}); - -pub struct Zeta2FeatureFlag; - -impl FeatureFlag for Zeta2FeatureFlag { - const NAME: &'static str = "zeta2"; - - fn enabled_for_staff() -> bool { - false - } -} - -#[derive(Clone)] -struct ZetaGlobal(Entity); - -impl Global for ZetaGlobal {} - -pub struct Zeta { - client: Arc, - user_store: Entity, - llm_token: LlmApiToken, - _llm_token_subscription: Subscription, - projects: HashMap, - options: ZetaOptions, - update_required: bool, - debug_tx: Option>, - #[cfg(feature = "llm-response-cache")] - llm_response_cache: Option>, -} - -#[cfg(feature = "llm-response-cache")] -pub trait LlmResponseCache: Send + Sync { - fn get_key(&self, url: &gpui::http_client::Url, body: &str) -> u64; - fn read_response(&self, key: u64) -> Option; - fn write_response(&self, key: u64, value: &str); -} - -#[derive(Debug, Clone, PartialEq)] -pub struct ZetaOptions { - pub context: ContextMode, - pub max_prompt_bytes: usize, - pub max_diagnostic_bytes: usize, - pub prompt_format: predict_edits_v3::PromptFormat, - pub file_indexing_parallelism: usize, - pub buffer_change_grouping_interval: Duration, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum ContextMode { - Agentic(AgenticContextOptions), - Syntax(EditPredictionContextOptions), -} - -#[derive(Debug, Clone, PartialEq)] -pub struct AgenticContextOptions { - pub excerpt: EditPredictionExcerptOptions, -} - -impl ContextMode { - pub fn excerpt(&self) -> &EditPredictionExcerptOptions { - match self { - ContextMode::Agentic(options) => &options.excerpt, - ContextMode::Syntax(options) => &options.excerpt, - } - } -} - -#[derive(Debug)] -pub enum ZetaDebugInfo { - ContextRetrievalStarted(ZetaContextRetrievalStartedDebugInfo), - SearchQueriesGenerated(ZetaSearchQueryDebugInfo), - SearchQueriesExecuted(ZetaContextRetrievalDebugInfo), - ContextRetrievalFinished(ZetaContextRetrievalDebugInfo), - EditPredictionRequested(ZetaEditPredictionDebugInfo), -} - -#[derive(Debug)] -pub struct ZetaContextRetrievalStartedDebugInfo { - pub 
project: Entity, - pub timestamp: Instant, - pub search_prompt: String, -} - -#[derive(Debug)] -pub struct ZetaContextRetrievalDebugInfo { - pub project: Entity, - pub timestamp: Instant, -} - -#[derive(Debug)] -pub struct ZetaEditPredictionDebugInfo { - pub request: predict_edits_v3::PredictEditsRequest, - pub retrieval_time: TimeDelta, - pub buffer: WeakEntity, - pub position: language::Anchor, - pub local_prompt: Result, - pub response_rx: oneshot::Receiver<(Result, TimeDelta)>, -} - -#[derive(Debug)] -pub struct ZetaSearchQueryDebugInfo { - pub project: Entity, - pub timestamp: Instant, - pub search_queries: Vec, -} - -pub type RequestDebugInfo = predict_edits_v3::DebugInfo; - -struct ZetaProject { - syntax_index: Entity, - events: VecDeque, - registered_buffers: HashMap, - current_prediction: Option, - context: Option, Vec>>>, - refresh_context_task: Option>>>, - refresh_context_debounce_task: Option>>, - refresh_context_timestamp: Option, -} - -#[derive(Debug, Clone)] -struct CurrentEditPrediction { - pub requested_by_buffer_id: EntityId, - pub prediction: EditPrediction, -} - -impl CurrentEditPrediction { - fn should_replace_prediction(&self, old_prediction: &Self, cx: &App) -> bool { - let Some(new_edits) = self - .prediction - .interpolate(&self.prediction.buffer.read(cx)) - else { - return false; - }; - - if self.prediction.buffer != old_prediction.prediction.buffer { - return true; - } - - let Some(old_edits) = old_prediction - .prediction - .interpolate(&old_prediction.prediction.buffer.read(cx)) - else { - return true; - }; - - // This reduces the occurrence of UI thrash from replacing edits - // - // TODO: This is fairly arbitrary - should have a more general heuristic that handles multiple edits. - if self.requested_by_buffer_id == self.prediction.buffer.entity_id() - && self.requested_by_buffer_id == old_prediction.prediction.buffer.entity_id() - && old_edits.len() == 1 - && new_edits.len() == 1 - { - let (old_range, old_text) = &old_edits[0]; - let (new_range, new_text) = &new_edits[0]; - new_range == old_range && new_text.starts_with(old_text.as_ref()) - } else { - true - } - } -} - -/// A prediction from the perspective of a buffer. -#[derive(Debug)] -enum BufferEditPrediction<'a> { - Local { prediction: &'a EditPrediction }, - Jump { prediction: &'a EditPrediction }, -} - -struct RegisteredBuffer { - snapshot: BufferSnapshot, - _subscriptions: [gpui::Subscription; 2], -} - -#[derive(Clone)] -pub enum Event { - BufferChange { - old_snapshot: BufferSnapshot, - new_snapshot: BufferSnapshot, - timestamp: Instant, - }, -} - -impl Event { - pub fn to_request_event(&self, cx: &App) -> Option { - match self { - Event::BufferChange { - old_snapshot, - new_snapshot, - .. - } => { - let path = new_snapshot.file().map(|f| f.full_path(cx)); - - let old_path = old_snapshot.file().and_then(|f| { - let old_path = f.full_path(cx); - if Some(&old_path) != path.as_ref() { - Some(old_path) - } else { - None - } - }); - - // TODO [zeta2] move to bg? 
- let diff = language::unified_diff(&old_snapshot.text(), &new_snapshot.text()); - - if path == old_path && diff.is_empty() { - None - } else { - Some(predict_edits_v3::Event::BufferChange { - old_path, - path, - diff, - //todo: Actually detect if this edit was predicted or not - predicted: false, - }) - } - } - } - } -} - -impl Zeta { - pub fn try_global(cx: &App) -> Option> { - cx.try_global::().map(|global| global.0.clone()) - } - - pub fn global( - client: &Arc, - user_store: &Entity, - cx: &mut App, - ) -> Entity { - cx.try_global::() - .map(|global| global.0.clone()) - .unwrap_or_else(|| { - let zeta = cx.new(|cx| Self::new(client.clone(), user_store.clone(), cx)); - cx.set_global(ZetaGlobal(zeta.clone())); - zeta - }) - } - - pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - - Self { - projects: HashMap::default(), - client, - user_store, - options: DEFAULT_OPTIONS, - llm_token: LlmApiToken::default(), - _llm_token_subscription: cx.subscribe( - &refresh_llm_token_listener, - |this, _listener, _event, cx| { - let client = this.client.clone(); - let llm_token = this.llm_token.clone(); - cx.spawn(async move |_this, _cx| { - llm_token.refresh(&client).await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }, - ), - update_required: false, - debug_tx: None, - #[cfg(feature = "llm-response-cache")] - llm_response_cache: None, - } - } - - #[cfg(feature = "llm-response-cache")] - pub fn with_llm_response_cache(&mut self, cache: Arc) { - self.llm_response_cache = Some(cache); - } - - pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver { - let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded(); - self.debug_tx = Some(debug_watch_tx); - debug_watch_rx - } - - pub fn options(&self) -> &ZetaOptions { - &self.options - } - - pub fn set_options(&mut self, options: ZetaOptions) { - self.options = options; - } - - pub fn clear_history(&mut self) { - for zeta_project in self.projects.values_mut() { - zeta_project.events.clear(); - } - } - - pub fn history_for_project(&self, project: &Entity) -> impl Iterator { - self.projects - .get(&project.entity_id()) - .map(|project| project.events.iter()) - .into_iter() - .flatten() - } - - pub fn context_for_project( - &self, - project: &Entity, - ) -> impl Iterator, &[Range])> { - self.projects - .get(&project.entity_id()) - .and_then(|project| { - Some( - project - .context - .as_ref()? 
- .iter() - .map(|(buffer, ranges)| (buffer.clone(), ranges.as_slice())), - ) - }) - .into_iter() - .flatten() - } - - pub fn usage(&self, cx: &App) -> Option { - self.user_store.read(cx).edit_prediction_usage() - } - - pub fn register_project(&mut self, project: &Entity, cx: &mut App) { - self.get_or_init_zeta_project(project, cx); - } - - pub fn register_buffer( - &mut self, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) { - let zeta_project = self.get_or_init_zeta_project(project, cx); - Self::register_buffer_impl(zeta_project, buffer, project, cx); - } - - fn get_or_init_zeta_project( - &mut self, - project: &Entity, - cx: &mut App, - ) -> &mut ZetaProject { - self.projects - .entry(project.entity_id()) - .or_insert_with(|| ZetaProject { - syntax_index: cx.new(|cx| { - SyntaxIndex::new(project, self.options.file_indexing_parallelism, cx) - }), - events: VecDeque::new(), - registered_buffers: HashMap::default(), - current_prediction: None, - context: None, - refresh_context_task: None, - refresh_context_debounce_task: None, - refresh_context_timestamp: None, - }) - } - - fn register_buffer_impl<'a>( - zeta_project: &'a mut ZetaProject, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) -> &'a mut RegisteredBuffer { - let buffer_id = buffer.entity_id(); - match zeta_project.registered_buffers.entry(buffer_id) { - hash_map::Entry::Occupied(entry) => entry.into_mut(), - hash_map::Entry::Vacant(entry) => { - let snapshot = buffer.read(cx).snapshot(); - let project_entity_id = project.entity_id(); - entry.insert(RegisteredBuffer { - snapshot, - _subscriptions: [ - cx.subscribe(buffer, { - let project = project.downgrade(); - move |this, buffer, event, cx| { - if let language::BufferEvent::Edited = event - && let Some(project) = project.upgrade() - { - this.report_changes_for_buffer(&buffer, &project, cx); - } - } - }), - cx.observe_release(buffer, move |this, _buffer, _cx| { - let Some(zeta_project) = this.projects.get_mut(&project_entity_id) - else { - return; - }; - zeta_project.registered_buffers.remove(&buffer_id); - }), - ], - }) - } - } - } - - fn report_changes_for_buffer( - &mut self, - buffer: &Entity, - project: &Entity, - cx: &mut Context, - ) -> BufferSnapshot { - let buffer_change_grouping_interval = self.options.buffer_change_grouping_interval; - let zeta_project = self.get_or_init_zeta_project(project, cx); - let registered_buffer = Self::register_buffer_impl(zeta_project, buffer, project, cx); - - let new_snapshot = buffer.read(cx).snapshot(); - if new_snapshot.version != registered_buffer.snapshot.version { - let old_snapshot = - std::mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone()); - Self::push_event( - zeta_project, - buffer_change_grouping_interval, - Event::BufferChange { - old_snapshot, - new_snapshot: new_snapshot.clone(), - timestamp: Instant::now(), - }, - ); - } - - new_snapshot - } - - fn push_event( - zeta_project: &mut ZetaProject, - buffer_change_grouping_interval: Duration, - event: Event, - ) { - let events = &mut zeta_project.events; - - if buffer_change_grouping_interval > Duration::ZERO - && let Some(Event::BufferChange { - new_snapshot: last_new_snapshot, - timestamp: last_timestamp, - .. - }) = events.back_mut() - { - // Coalesce edits for the same buffer when they happen one after the other. 
- let Event::BufferChange { - old_snapshot, - new_snapshot, - timestamp, - } = &event; - - if timestamp.duration_since(*last_timestamp) <= buffer_change_grouping_interval - && old_snapshot.remote_id() == last_new_snapshot.remote_id() - && old_snapshot.version == last_new_snapshot.version - { - *last_new_snapshot = new_snapshot.clone(); - *last_timestamp = *timestamp; - return; - } - } - - if events.len() >= MAX_EVENT_COUNT { - // These are halved instead of popping to improve prompt caching. - events.drain(..MAX_EVENT_COUNT / 2); - } - - events.push_back(event); - } - - fn current_prediction_for_buffer( - &self, - buffer: &Entity, - project: &Entity, - cx: &App, - ) -> Option> { - let project_state = self.projects.get(&project.entity_id())?; - - let CurrentEditPrediction { - requested_by_buffer_id, - prediction, - } = project_state.current_prediction.as_ref()?; - - if prediction.targets_buffer(buffer.read(cx)) { - Some(BufferEditPrediction::Local { prediction }) - } else if *requested_by_buffer_id == buffer.entity_id() { - Some(BufferEditPrediction::Jump { prediction }) - } else { - None - } - } - - fn accept_current_prediction(&mut self, project: &Entity, cx: &mut Context) { - let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { - return; - }; - - let Some(prediction) = project_state.current_prediction.take() else { - return; - }; - let request_id = prediction.prediction.id.to_string(); - - let client = self.client.clone(); - let llm_token = self.llm_token.clone(); - let app_version = AppVersion::global(cx); - cx.spawn(async move |this, cx| { - let url = if let Ok(predict_edits_url) = env::var("ZED_ACCEPT_PREDICTION_URL") { - http_client::Url::parse(&predict_edits_url)? - } else { - client - .http_client() - .build_zed_llm_url("/predict_edits/accept", &[])? - }; - - let response = cx - .background_spawn(Self::send_api_request::<()>( - move |builder| { - let req = builder.uri(url.as_ref()).body( - serde_json::to_string(&AcceptEditPredictionBody { - request_id: request_id.clone(), - })? - .into(), - ); - Ok(req?) - }, - client, - llm_token, - app_version, - )) - .await; - - Self::handle_api_response(&this, response, cx)?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - - fn discard_current_prediction(&mut self, project: &Entity) { - if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { - project_state.current_prediction.take(); - }; - } - - pub fn refresh_prediction( - &mut self, - project: &Entity, - buffer: &Entity, - position: language::Anchor, - cx: &mut Context, - ) -> Task> { - let request_task = self.request_prediction(project, buffer, position, cx); - let buffer = buffer.clone(); - let project = project.clone(); - - cx.spawn(async move |this, cx| { - if let Some(prediction) = request_task.await? 
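
For reference, the coalescing strategy implemented by `push_event` above can be reduced to a small, dependency-free sketch. All names here (`ChangeEvent`, `MAX_EVENTS`, the version fields) are illustrative stand-ins rather than the crate's actual types: consecutive edits to the same buffer that arrive within the grouping interval collapse into a single event, and when the queue fills up the oldest half is dropped in one step rather than popping entries one at a time.

```rust
use std::collections::VecDeque;
use std::time::{Duration, Instant};

const MAX_EVENTS: usize = 16;

struct ChangeEvent {
    buffer_id: u64,
    old_version: u64,
    new_version: u64,
    timestamp: Instant,
}

fn push_event(events: &mut VecDeque<ChangeEvent>, grouping: Duration, event: ChangeEvent) {
    // Coalesce consecutive edits to the same buffer that happen close together.
    if grouping > Duration::ZERO {
        if let Some(last) = events.back_mut() {
            if event.timestamp.duration_since(last.timestamp) <= grouping
                && last.buffer_id == event.buffer_id
                && last.new_version == event.old_version
            {
                last.new_version = event.new_version;
                last.timestamp = event.timestamp;
                return;
            }
        }
    }

    // Drop the oldest half in one step so the surviving prefix of the history
    // changes less often than it would with per-event popping.
    if events.len() >= MAX_EVENTS {
        events.drain(..MAX_EVENTS / 2);
    }

    events.push_back(event);
}

fn main() {
    let mut events = VecDeque::new();
    let now = Instant::now();
    for version in 0..4 {
        push_event(
            &mut events,
            Duration::from_secs(1),
            ChangeEvent {
                buffer_id: 1,
                old_version: version,
                new_version: version + 1,
                timestamp: now,
            },
        );
    }
    // The four consecutive edits collapse into a single 0 -> 4 event.
    assert_eq!(events.len(), 1);
    assert_eq!(events[0].new_version, 4);
}
```
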
{ - this.update(cx, |this, cx| { - let project_state = this - .projects - .get_mut(&project.entity_id()) - .context("Project not found")?; - - let new_prediction = CurrentEditPrediction { - requested_by_buffer_id: buffer.entity_id(), - prediction: prediction, - }; - - if project_state - .current_prediction - .as_ref() - .is_none_or(|old_prediction| { - new_prediction.should_replace_prediction(&old_prediction, cx) - }) - { - project_state.current_prediction = Some(new_prediction); - } - anyhow::Ok(()) - })??; - } - Ok(()) - }) - } - - pub fn request_prediction( - &mut self, - project: &Entity, - active_buffer: &Entity, - position: language::Anchor, - cx: &mut Context, - ) -> Task>> { - let project_state = self.projects.get(&project.entity_id()); - - let index_state = project_state.map(|state| { - state - .syntax_index - .read_with(cx, |index, _cx| index.state().clone()) - }); - let options = self.options.clone(); - let active_snapshot = active_buffer.read(cx).snapshot(); - let Some(excerpt_path) = active_snapshot - .file() - .map(|path| -> Arc { path.full_path(cx).into() }) - else { - return Task::ready(Err(anyhow!("No file path for excerpt"))); - }; - let client = self.client.clone(); - let llm_token = self.llm_token.clone(); - let app_version = AppVersion::global(cx); - let worktree_snapshots = project - .read(cx) - .worktrees(cx) - .map(|worktree| worktree.read(cx).snapshot()) - .collect::>(); - let debug_tx = self.debug_tx.clone(); - - let events = project_state - .map(|state| { - state - .events - .iter() - .filter_map(|event| event.to_request_event(cx)) - .collect::>() - }) - .unwrap_or_default(); - - let diagnostics = active_snapshot.diagnostic_sets().clone(); - - let parent_abs_path = - project::File::from_dyn(active_buffer.read(cx).file()).and_then(|f| { - let mut path = f.worktree.read(cx).absolutize(&f.path); - if path.pop() { Some(path) } else { None } - }); - - // TODO data collection - let can_collect_data = cx.is_staff(); - - let mut included_files = project_state - .and_then(|project_state| project_state.context.as_ref()) - .unwrap_or(&HashMap::default()) - .iter() - .filter_map(|(buffer_entity, ranges)| { - let buffer = buffer_entity.read(cx); - Some(( - buffer_entity.clone(), - buffer.snapshot(), - buffer.file()?.full_path(cx).into(), - ranges.clone(), - )) - }) - .collect::>(); - - #[cfg(feature = "llm-response-cache")] - let llm_response_cache = self.llm_response_cache.clone(); - - let request_task = cx.background_spawn({ - let active_buffer = active_buffer.clone(); - async move { - let index_state = if let Some(index_state) = index_state { - Some(index_state.lock_owned().await) - } else { - None - }; - - let cursor_offset = position.to_offset(&active_snapshot); - let cursor_point = cursor_offset.to_point(&active_snapshot); - - let before_retrieval = chrono::Utc::now(); - - let (diagnostic_groups, diagnostic_groups_truncated) = - Self::gather_nearby_diagnostics( - cursor_offset, - &diagnostics, - &active_snapshot, - options.max_diagnostic_bytes, - ); - - let cloud_request = match options.context { - ContextMode::Agentic(context_options) => { - let Some(excerpt) = EditPredictionExcerpt::select_from_buffer( - cursor_point, - &active_snapshot, - &context_options.excerpt, - index_state.as_deref(), - ) else { - return Ok((None, None)); - }; - - let excerpt_anchor_range = active_snapshot.anchor_after(excerpt.range.start) - ..active_snapshot.anchor_before(excerpt.range.end); - - if let Some(buffer_ix) = - included_files.iter().position(|(_, snapshot, _, _)| { - 
snapshot.remote_id() == active_snapshot.remote_id() - }) - { - let (_, buffer, _, ranges) = &mut included_files[buffer_ix]; - let range_ix = ranges - .binary_search_by(|probe| { - probe - .start - .cmp(&excerpt_anchor_range.start, buffer) - .then(excerpt_anchor_range.end.cmp(&probe.end, buffer)) - }) - .unwrap_or_else(|ix| ix); - - ranges.insert(range_ix, excerpt_anchor_range); - let last_ix = included_files.len() - 1; - included_files.swap(buffer_ix, last_ix); - } else { - included_files.push(( - active_buffer.clone(), - active_snapshot, - excerpt_path.clone(), - vec![excerpt_anchor_range], - )); - } - - let included_files = included_files - .iter() - .map(|(_, buffer, path, ranges)| { - let excerpts = merge_excerpts( - &buffer, - ranges.iter().map(|range| { - let point_range = range.to_point(&buffer); - Line(point_range.start.row)..Line(point_range.end.row) - }), - ); - predict_edits_v3::IncludedFile { - path: path.clone(), - max_row: Line(buffer.max_point().row), - excerpts, - } - }) - .collect::>(); - - predict_edits_v3::PredictEditsRequest { - excerpt_path, - excerpt: String::new(), - excerpt_line_range: Line(0)..Line(0), - excerpt_range: 0..0, - cursor_point: predict_edits_v3::Point { - line: predict_edits_v3::Line(cursor_point.row), - column: cursor_point.column, - }, - included_files, - referenced_declarations: vec![], - events, - can_collect_data, - diagnostic_groups, - diagnostic_groups_truncated, - debug_info: debug_tx.is_some(), - prompt_max_bytes: Some(options.max_prompt_bytes), - prompt_format: options.prompt_format, - // TODO [zeta2] - signatures: vec![], - excerpt_parent: None, - git_info: None, - } - } - ContextMode::Syntax(context_options) => { - let Some(context) = EditPredictionContext::gather_context( - cursor_point, - &active_snapshot, - parent_abs_path.as_deref(), - &context_options, - index_state.as_deref(), - ) else { - return Ok((None, None)); - }; - - make_syntax_context_cloud_request( - excerpt_path, - context, - events, - can_collect_data, - diagnostic_groups, - diagnostic_groups_truncated, - None, - debug_tx.is_some(), - &worktree_snapshots, - index_state.as_deref(), - Some(options.max_prompt_bytes), - options.prompt_format, - ) - } - }; - - let prompt_result = cloud_zeta2_prompt::build_prompt(&cloud_request); - - let retrieval_time = chrono::Utc::now() - before_retrieval; - - let debug_response_tx = if let Some(debug_tx) = &debug_tx { - let (response_tx, response_rx) = oneshot::channel(); - - debug_tx - .unbounded_send(ZetaDebugInfo::EditPredictionRequested( - ZetaEditPredictionDebugInfo { - request: cloud_request.clone(), - retrieval_time, - buffer: active_buffer.downgrade(), - local_prompt: match prompt_result.as_ref() { - Ok((prompt, _)) => Ok(prompt.clone()), - Err(err) => Err(err.to_string()), - }, - position, - response_rx, - }, - )) - .ok(); - Some(response_tx) - } else { - None - }; - - if cfg!(debug_assertions) && env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() { - if let Some(debug_response_tx) = debug_response_tx { - debug_response_tx - .send((Err("Request skipped".to_string()), TimeDelta::zero())) - .ok(); - } - anyhow::bail!("Skipping request because ZED_ZETA2_SKIP_REQUEST is set") - } - - let (prompt, _) = prompt_result?; - let request = open_ai::Request { - model: MODEL_ID.clone(), - messages: vec![open_ai::RequestMessage::User { - content: open_ai::MessageContent::Plain(prompt), - }], - stream: false, - max_completion_tokens: None, - stop: Default::default(), - temperature: 0.7, - tool_choice: None, - parallel_tool_calls: None, - tools: vec![], - 
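
A side note on the bookkeeping above: each file's excerpt ranges are kept sorted so they can later be merged in order, and the cursor excerpt is inserted with a `binary_search_by` whose comparator orders by start ascending and, for equal starts, by end descending. A rough standalone equivalent over plain `usize` offsets (names are illustrative; buffer anchors are replaced by integer offsets):

```rust
use std::ops::Range;

/// Insert `new` into `ranges`, which is kept sorted by start ascending and,
/// for equal starts, by end descending (so larger ranges come first).
fn insert_sorted(ranges: &mut Vec<Range<usize>>, new: Range<usize>) {
    let ix = ranges
        .binary_search_by(|probe| {
            probe
                .start
                .cmp(&new.start)
                .then(new.end.cmp(&probe.end))
        })
        .unwrap_or_else(|ix| ix);
    ranges.insert(ix, new);
}

fn main() {
    let mut ranges = vec![0..10, 20..30];
    insert_sorted(&mut ranges, 15..25);
    assert_eq!(ranges, vec![0..10, 15..25, 20..30]);
}
```
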
prompt_cache_key: None, - reasoning_effort: None, - }; - - log::trace!("Sending edit prediction request"); - - let before_request = chrono::Utc::now(); - let response = Self::send_raw_llm_request( - request, - client, - llm_token, - app_version, - #[cfg(feature = "llm-response-cache")] - llm_response_cache, - ) - .await; - let request_time = chrono::Utc::now() - before_request; - - log::trace!("Got edit prediction response"); - - if let Some(debug_response_tx) = debug_response_tx { - debug_response_tx - .send(( - response - .as_ref() - .map_err(|err| err.to_string()) - .map(|response| response.0.clone()), - request_time, - )) - .ok(); - } - - let (res, usage) = response?; - let request_id = EditPredictionId(res.id.clone().into()); - let Some(mut output_text) = text_from_response(res) else { - return Ok((None, usage)); - }; - - if output_text.contains(CURSOR_MARKER) { - log::trace!("Stripping out {CURSOR_MARKER} from response"); - output_text = output_text.replace(CURSOR_MARKER, ""); - } - - let get_buffer_from_context = |path: &Path| { - included_files - .iter() - .find_map(|(_, buffer, probe_path, ranges)| { - if probe_path.as_ref() == path { - Some((buffer, ranges.as_slice())) - } else { - None - } - }) - }; - - let (edited_buffer_snapshot, edits) = match options.prompt_format { - PromptFormat::NumLinesUniDiff => { - crate::udiff::parse_diff(&output_text, get_buffer_from_context).await? - } - PromptFormat::OldTextNewText => { - crate::xml_edits::parse_xml_edits(&output_text, get_buffer_from_context) - .await? - } - _ => { - bail!("unsupported prompt format {}", options.prompt_format) - } - }; - - let edited_buffer = included_files - .iter() - .find_map(|(buffer, snapshot, _, _)| { - if snapshot.remote_id() == edited_buffer_snapshot.remote_id() { - Some(buffer.clone()) - } else { - None - } - }) - .context("Failed to find buffer in included_buffers")?; - - anyhow::Ok(( - Some(( - request_id, - edited_buffer, - edited_buffer_snapshot.clone(), - edits, - )), - usage, - )) - } - }); - - cx.spawn({ - async move |this, cx| { - let Some((id, edited_buffer, edited_buffer_snapshot, edits)) = - Self::handle_api_response(&this, request_task.await, cx)? - else { - return Ok(None); - }; - - // TODO telemetry: duration, etc - Ok( - EditPrediction::new(id, &edited_buffer, &edited_buffer_snapshot, edits, cx) - .await, - ) - } - }) - } - - async fn send_raw_llm_request( - request: open_ai::Request, - client: Arc, - llm_token: LlmApiToken, - app_version: SemanticVersion, - #[cfg(feature = "llm-response-cache")] llm_response_cache: Option< - Arc, - >, - ) -> Result<(open_ai::Response, Option)> { - let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() { - http_client::Url::parse(&predict_edits_url)? - } else { - client - .http_client() - .build_zed_llm_url("/predict_edits/raw", &[])? - }; - - #[cfg(feature = "llm-response-cache")] - let cache_key = if let Some(cache) = llm_response_cache { - let request_json = serde_json::to_string(&request)?; - let key = cache.get_key(&url, &request_json); - - if let Some(response_str) = cache.read_response(key) { - return Ok((serde_json::from_str(&response_str)?, None)); - } - - Some((cache, key)) - } else { - None - }; - - let (response, usage) = Self::send_api_request( - |builder| { - let req = builder - .uri(url.as_ref()) - .body(serde_json::to_string(&request)?.into()); - Ok(req?) 
- }, - client, - llm_token, - app_version, - ) - .await?; - - #[cfg(feature = "llm-response-cache")] - if let Some((cache, key)) = cache_key { - cache.write_response(key, &serde_json::to_string(&response)?); - } - - Ok((response, usage)) - } - - fn handle_api_response( - this: &WeakEntity, - response: Result<(T, Option)>, - cx: &mut gpui::AsyncApp, - ) -> Result { - match response { - Ok((data, usage)) => { - if let Some(usage) = usage { - this.update(cx, |this, cx| { - this.user_store.update(cx, |user_store, cx| { - user_store.update_edit_prediction_usage(usage, cx); - }); - }) - .ok(); - } - Ok(data) - } - Err(err) => { - if err.is::() { - cx.update(|cx| { - this.update(cx, |this, _cx| { - this.update_required = true; - }) - .ok(); - - let error_message: SharedString = err.to_string().into(); - show_app_notification( - NotificationId::unique::(), - cx, - move |cx| { - cx.new(|cx| { - ErrorMessagePrompt::new(error_message.clone(), cx) - .with_link_button("Update Zed", "https://zed.dev/releases") - }) - }, - ); - }) - .ok(); - } - Err(err) - } - } - } - - async fn send_api_request( - build: impl Fn(http_client::http::request::Builder) -> Result>, - client: Arc, - llm_token: LlmApiToken, - app_version: SemanticVersion, - ) -> Result<(Res, Option)> - where - Res: DeserializeOwned, - { - let http_client = client.http_client(); - let mut token = llm_token.acquire(&client).await?; - let mut did_retry = false; - - loop { - let request_builder = http_client::Request::builder().method(Method::POST); - - let request = build( - request_builder - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", token)) - .header(ZED_VERSION_HEADER_NAME, app_version.to_string()), - )?; - - let mut response = http_client.send(request).await?; - - if let Some(minimum_required_version) = response - .headers() - .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME) - .and_then(|version| SemanticVersion::from_str(version.to_str().ok()?).ok()) - { - anyhow::ensure!( - app_version >= minimum_required_version, - ZedUpdateRequiredError { - minimum_version: minimum_required_version - } - ); - } - - if response.status().is_success() { - let usage = EditPredictionUsage::from_headers(response.headers()).ok(); - - let mut body = Vec::new(); - response.body_mut().read_to_end(&mut body).await?; - return Ok((serde_json::from_slice(&body)?, usage)); - } else if !did_retry - && response - .headers() - .get(EXPIRED_LLM_TOKEN_HEADER_NAME) - .is_some() - { - did_retry = true; - token = llm_token.refresh(&client).await?; - } else { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - anyhow::bail!( - "Request failed with status: {:?}\nBody: {}", - response.status(), - body - ); - } - } - } - - pub const CONTEXT_RETRIEVAL_IDLE_DURATION: Duration = Duration::from_secs(10); - pub const CONTEXT_RETRIEVAL_DEBOUNCE_DURATION: Duration = Duration::from_secs(3); - - // Refresh the related excerpts when the user just beguns editing after - // an idle period, and after they pause editing. - fn refresh_context_if_needed( - &mut self, - project: &Entity, - buffer: &Entity, - cursor_position: language::Anchor, - cx: &mut Context, - ) { - if !matches!(&self.options().context, ContextMode::Agentic { .. 
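
The request loop above follows a retry-once pattern: send with the current LLM token, and if the response carries the expired-token header and no retry has happened yet, refresh the token and try again; any other failure is surfaced. A minimal synchronous sketch of that control flow, with invented types (`SendError`, the `send`/`refresh` closures) standing in for the HTTP client and the token service; the real code is async and also enforces a minimum-version header:

```rust
#[derive(Debug)]
enum SendError {
    TokenExpired,
    Other(String),
}

/// Send a request with `token`, refreshing and retrying exactly once if the
/// server reports the token as expired.
fn send_with_retry<T>(
    mut token: String,
    mut send: impl FnMut(&str) -> Result<T, SendError>,
    mut refresh: impl FnMut() -> Result<String, String>,
) -> Result<T, String> {
    let mut did_retry = false;
    loop {
        match send(&token) {
            Ok(value) => return Ok(value),
            Err(SendError::TokenExpired) if !did_retry => {
                did_retry = true;
                token = refresh().map_err(|err| format!("token refresh failed: {err}"))?;
            }
            Err(SendError::TokenExpired) => return Err("token expired after retry".into()),
            Err(SendError::Other(message)) => return Err(message),
        }
    }
}

fn main() {
    let mut calls = 0;
    let result = send_with_retry(
        "stale".to_string(),
        |token| {
            calls += 1;
            if token == "fresh" {
                Ok(42)
            } else {
                Err(SendError::TokenExpired)
            }
        },
        || Ok("fresh".to_string()),
    );
    assert_eq!(result.unwrap(), 42);
    assert_eq!(calls, 2);
}
```
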
}) { - return; - } - - let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) else { - return; - }; - - let now = Instant::now(); - let was_idle = zeta_project - .refresh_context_timestamp - .map_or(true, |timestamp| { - now - timestamp > Self::CONTEXT_RETRIEVAL_IDLE_DURATION - }); - zeta_project.refresh_context_timestamp = Some(now); - zeta_project.refresh_context_debounce_task = Some(cx.spawn({ - let buffer = buffer.clone(); - let project = project.clone(); - async move |this, cx| { - if was_idle { - log::debug!("refetching edit prediction context after idle"); - } else { - cx.background_executor() - .timer(Self::CONTEXT_RETRIEVAL_DEBOUNCE_DURATION) - .await; - log::debug!("refetching edit prediction context after pause"); - } - this.update(cx, |this, cx| { - let task = this.refresh_context(project.clone(), buffer, cursor_position, cx); - - if let Some(zeta_project) = this.projects.get_mut(&project.entity_id()) { - zeta_project.refresh_context_task = Some(task.log_err()); - }; - }) - .ok() - } - })); - } - - // Refresh the related excerpts asynchronously. Ensure the task runs to completion, - // and avoid spawning more than one concurrent task. - pub fn refresh_context( - &mut self, - project: Entity, - buffer: Entity, - cursor_position: language::Anchor, - cx: &mut Context, - ) -> Task> { - let Some(zeta_project) = self.projects.get(&project.entity_id()) else { - return Task::ready(anyhow::Ok(())); - }; - - let ContextMode::Agentic(options) = &self.options().context else { - return Task::ready(anyhow::Ok(())); - }; - - let snapshot = buffer.read(cx).snapshot(); - let cursor_point = cursor_position.to_point(&snapshot); - let Some(cursor_excerpt) = EditPredictionExcerpt::select_from_buffer( - cursor_point, - &snapshot, - &options.excerpt, - None, - ) else { - return Task::ready(Ok(())); - }; - - let app_version = AppVersion::global(cx); - let client = self.client.clone(); - let llm_token = self.llm_token.clone(); - let debug_tx = self.debug_tx.clone(); - let current_file_path: Arc = snapshot - .file() - .map(|f| f.full_path(cx).into()) - .unwrap_or_else(|| Path::new("untitled").into()); - - let prompt = match cloud_zeta2_prompt::retrieval_prompt::build_prompt( - predict_edits_v3::PlanContextRetrievalRequest { - excerpt: cursor_excerpt.text(&snapshot).body, - excerpt_path: current_file_path, - excerpt_line_range: cursor_excerpt.line_range, - cursor_file_max_row: Line(snapshot.max_point().row), - events: zeta_project - .events - .iter() - .filter_map(|ev| ev.to_request_event(cx)) - .collect(), - }, - ) { - Ok(prompt) => prompt, - Err(err) => { - return Task::ready(Err(err)); - } - }; - - if let Some(debug_tx) = &debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted( - ZetaContextRetrievalStartedDebugInfo { - project: project.clone(), - timestamp: Instant::now(), - search_prompt: prompt.clone(), - }, - )) - .ok(); - } - - pub static TOOL_SCHEMA: LazyLock<(serde_json::Value, String)> = LazyLock::new(|| { - let schema = language_model::tool_schema::root_schema_for::( - language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset, - ); - - let description = schema - .get("description") - .and_then(|description| description.as_str()) - .unwrap() - .to_string(); - - (schema.into(), description) - }); - - let (tool_schema, tool_description) = TOOL_SCHEMA.clone(); - - let request = open_ai::Request { - model: MODEL_ID.clone(), - messages: vec![open_ai::RequestMessage::User { - content: open_ai::MessageContent::Plain(prompt), - }], - stream: false, - 
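
The trigger above distinguishes two cases: if the user resumes typing after an idle period, the context is refetched immediately; otherwise the refetch is debounced until typing pauses. A small dependency-free sketch of just that decision (the constants and the `Refresh` enum are illustrative, not part of the crate):

```rust
use std::time::{Duration, Instant};

const IDLE: Duration = Duration::from_secs(10);
const DEBOUNCE: Duration = Duration::from_secs(3);

#[derive(Debug, PartialEq)]
enum Refresh {
    /// The user just resumed after an idle period; refresh right away.
    Immediately,
    /// The user is actively typing; wait for a pause before refreshing.
    After(Duration),
}

fn plan_refresh(last_edit: Option<Instant>, now: Instant) -> Refresh {
    let was_idle = last_edit.map_or(true, |t| now.duration_since(t) > IDLE);
    if was_idle {
        Refresh::Immediately
    } else {
        Refresh::After(DEBOUNCE)
    }
}

fn main() {
    let now = Instant::now();
    assert_eq!(plan_refresh(None, now), Refresh::Immediately);
    assert_eq!(plan_refresh(Some(now), now), Refresh::After(DEBOUNCE));
}
```
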
max_completion_tokens: None, - stop: Default::default(), - temperature: 0.7, - tool_choice: None, - parallel_tool_calls: None, - tools: vec![open_ai::ToolDefinition::Function { - function: FunctionDefinition { - name: cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME.to_string(), - description: Some(tool_description), - parameters: Some(tool_schema), - }, - }], - prompt_cache_key: None, - reasoning_effort: None, - }; - - #[cfg(feature = "llm-response-cache")] - let llm_response_cache = self.llm_response_cache.clone(); - - cx.spawn(async move |this, cx| { - log::trace!("Sending search planning request"); - let response = Self::send_raw_llm_request( - request, - client, - llm_token, - app_version, - #[cfg(feature = "llm-response-cache")] - llm_response_cache, - ) - .await; - let mut response = Self::handle_api_response(&this, response, cx)?; - log::trace!("Got search planning response"); - - let choice = response - .choices - .pop() - .context("No choices in retrieval response")?; - let open_ai::RequestMessage::Assistant { - content: _, - tool_calls, - } = choice.message - else { - anyhow::bail!("Retrieval response didn't include an assistant message"); - }; - - let mut queries: Vec = Vec::new(); - for tool_call in tool_calls { - let open_ai::ToolCallContent::Function { function } = tool_call.content; - if function.name != cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME { - log::warn!( - "Context retrieval response tried to call an unknown tool: {}", - function.name - ); - - continue; - } - - let input: SearchToolInput = serde_json::from_str(&function.arguments) - .with_context(|| format!("invalid search json {}", &function.arguments))?; - queries.extend(input.queries); - } - - if let Some(debug_tx) = &debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::SearchQueriesGenerated( - ZetaSearchQueryDebugInfo { - project: project.clone(), - timestamp: Instant::now(), - search_queries: queries.clone(), - }, - )) - .ok(); - } - - log::trace!("Running retrieval search: {queries:#?}"); - - let related_excerpts_result = - retrieval_search::run_retrieval_searches(project.clone(), queries, cx).await; - - log::trace!("Search queries executed"); - - if let Some(debug_tx) = &debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted( - ZetaContextRetrievalDebugInfo { - project: project.clone(), - timestamp: Instant::now(), - }, - )) - .ok(); - } - - this.update(cx, |this, _cx| { - let Some(zeta_project) = this.projects.get_mut(&project.entity_id()) else { - return Ok(()); - }; - zeta_project.refresh_context_task.take(); - if let Some(debug_tx) = &this.debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::ContextRetrievalFinished( - ZetaContextRetrievalDebugInfo { - project, - timestamp: Instant::now(), - }, - )) - .ok(); - } - match related_excerpts_result { - Ok(excerpts) => { - zeta_project.context = Some(excerpts); - Ok(()) - } - Err(error) => Err(error), - } - })? 
- }) - } - - pub fn set_context( - &mut self, - project: Entity, - context: HashMap, Vec>>, - ) { - if let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) { - zeta_project.context = Some(context); - } - } - - fn gather_nearby_diagnostics( - cursor_offset: usize, - diagnostic_sets: &[(LanguageServerId, DiagnosticSet)], - snapshot: &BufferSnapshot, - max_diagnostics_bytes: usize, - ) -> (Vec, bool) { - // TODO: Could make this more efficient - let mut diagnostic_groups = Vec::new(); - for (language_server_id, diagnostics) in diagnostic_sets { - let mut groups = Vec::new(); - diagnostics.groups(*language_server_id, &mut groups, &snapshot); - diagnostic_groups.extend( - groups - .into_iter() - .map(|(_, group)| group.resolve::(&snapshot)), - ); - } - - // sort by proximity to cursor - diagnostic_groups.sort_by_key(|group| { - let range = &group.entries[group.primary_ix].range; - if range.start >= cursor_offset { - range.start - cursor_offset - } else if cursor_offset >= range.end { - cursor_offset - range.end - } else { - (cursor_offset - range.start).min(range.end - cursor_offset) - } - }); - - let mut results = Vec::new(); - let mut diagnostic_groups_truncated = false; - let mut diagnostics_byte_count = 0; - for group in diagnostic_groups { - let raw_value = serde_json::value::to_raw_value(&group).unwrap(); - diagnostics_byte_count += raw_value.get().len(); - if diagnostics_byte_count > max_diagnostics_bytes { - diagnostic_groups_truncated = true; - break; - } - results.push(predict_edits_v3::DiagnosticGroup(raw_value)); - } - - (results, diagnostic_groups_truncated) - } - - // TODO: Dedupe with similar code in request_prediction? - pub fn cloud_request_for_zeta_cli( - &mut self, - project: &Entity, - buffer: &Entity, - position: language::Anchor, - cx: &mut Context, - ) -> Task> { - let project_state = self.projects.get(&project.entity_id()); - - let index_state = project_state.map(|state| { - state - .syntax_index - .read_with(cx, |index, _cx| index.state().clone()) - }); - let options = self.options.clone(); - let snapshot = buffer.read(cx).snapshot(); - let Some(excerpt_path) = snapshot.file().map(|path| path.full_path(cx)) else { - return Task::ready(Err(anyhow!("No file path for excerpt"))); - }; - let worktree_snapshots = project - .read(cx) - .worktrees(cx) - .map(|worktree| worktree.read(cx).snapshot()) - .collect::>(); - - let parent_abs_path = project::File::from_dyn(buffer.read(cx).file()).and_then(|f| { - let mut path = f.worktree.read(cx).absolutize(&f.path); - if path.pop() { Some(path) } else { None } - }); - - cx.background_spawn(async move { - let index_state = if let Some(index_state) = index_state { - Some(index_state.lock_owned().await) - } else { - None - }; - - let cursor_point = position.to_point(&snapshot); - - let debug_info = true; - EditPredictionContext::gather_context( - cursor_point, - &snapshot, - parent_abs_path.as_deref(), - match &options.context { - ContextMode::Agentic(_) => { - // TODO - panic!("Llm mode not supported in zeta cli yet"); - } - ContextMode::Syntax(edit_prediction_context_options) => { - edit_prediction_context_options - } - }, - index_state.as_deref(), - ) - .context("Failed to select excerpt") - .map(|context| { - make_syntax_context_cloud_request( - excerpt_path.into(), - context, - // TODO pass everything - Vec::new(), - false, - Vec::new(), - false, - None, - debug_info, - &worktree_snapshots, - index_state.as_deref(), - Some(options.max_prompt_bytes), - options.prompt_format, - ) - }) - }) - } - - pub fn 
wait_for_initial_indexing( - &mut self, - project: &Entity, - cx: &mut App, - ) -> Task> { - let zeta_project = self.get_or_init_zeta_project(project, cx); - zeta_project - .syntax_index - .read(cx) - .wait_for_initial_file_indexing(cx) - } -} - -pub fn text_from_response(mut res: open_ai::Response) -> Option { - let choice = res.choices.pop()?; - let output_text = match choice.message { - open_ai::RequestMessage::Assistant { - content: Some(open_ai::MessageContent::Plain(content)), - .. - } => content, - open_ai::RequestMessage::Assistant { - content: Some(open_ai::MessageContent::Multipart(mut content)), - .. - } => { - if content.is_empty() { - log::error!("No output from Baseten completion response"); - return None; - } - - match content.remove(0) { - open_ai::MessagePart::Text { text } => text, - open_ai::MessagePart::Image { .. } => { - log::error!("Expected text, got an image"); - return None; - } - } - } - _ => { - log::error!("Invalid response message: {:?}", choice.message); - return None; - } - }; - Some(output_text) -} - -#[derive(Error, Debug)] -#[error( - "You must update to Zed version {minimum_version} or higher to continue using edit predictions." -)] -pub struct ZedUpdateRequiredError { - minimum_version: SemanticVersion, -} - -fn make_syntax_context_cloud_request( - excerpt_path: Arc, - context: EditPredictionContext, - events: Vec, - can_collect_data: bool, - diagnostic_groups: Vec, - diagnostic_groups_truncated: bool, - git_info: Option, - debug_info: bool, - worktrees: &Vec, - index_state: Option<&SyntaxIndexState>, - prompt_max_bytes: Option, - prompt_format: PromptFormat, -) -> predict_edits_v3::PredictEditsRequest { - let mut signatures = Vec::new(); - let mut declaration_to_signature_index = HashMap::default(); - let mut referenced_declarations = Vec::new(); - - for snippet in context.declarations { - let project_entry_id = snippet.declaration.project_entry_id(); - let Some(path) = worktrees.iter().find_map(|worktree| { - worktree.entry_for_id(project_entry_id).map(|entry| { - let mut full_path = RelPathBuf::new(); - full_path.push(worktree.root_name()); - full_path.push(&entry.path); - full_path - }) - }) else { - continue; - }; - - let parent_index = index_state.and_then(|index_state| { - snippet.declaration.parent().and_then(|parent| { - add_signature( - parent, - &mut declaration_to_signature_index, - &mut signatures, - index_state, - ) - }) - }); - - let (text, text_is_truncated) = snippet.declaration.item_text(); - referenced_declarations.push(predict_edits_v3::ReferencedDeclaration { - path: path.as_std_path().into(), - text: text.into(), - range: snippet.declaration.item_line_range(), - text_is_truncated, - signature_range: snippet.declaration.signature_range_in_item_text(), - parent_index, - signature_score: snippet.score(DeclarationStyle::Signature), - declaration_score: snippet.score(DeclarationStyle::Declaration), - score_components: snippet.components, - }); - } - - let excerpt_parent = index_state.and_then(|index_state| { - context - .excerpt - .parent_declarations - .last() - .and_then(|(parent, _)| { - add_signature( - *parent, - &mut declaration_to_signature_index, - &mut signatures, - index_state, - ) - }) - }); - - predict_edits_v3::PredictEditsRequest { - excerpt_path, - excerpt: context.excerpt_text.body, - excerpt_line_range: context.excerpt.line_range, - excerpt_range: context.excerpt.range, - cursor_point: predict_edits_v3::Point { - line: predict_edits_v3::Line(context.cursor_point.row), - column: context.cursor_point.column, - }, - 
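
`gather_nearby_diagnostics` above boils down to: sort diagnostic groups by their distance from the cursor, then keep groups in that order until a serialized-byte budget would be exceeded, reporting whether anything was dropped. A rough standalone sketch over plain offset ranges and string payloads (the real code serializes full LSP diagnostic groups; all names here are illustrative):

```rust
use std::ops::Range;

/// Distance from `cursor` to `range`; when the cursor is inside the range,
/// this is the distance to the nearest edge, mirroring the sort key above.
fn distance(cursor: usize, range: &Range<usize>) -> usize {
    if range.start >= cursor {
        range.start - cursor
    } else if cursor >= range.end {
        cursor - range.end
    } else {
        (cursor - range.start).min(range.end - cursor)
    }
}

/// Keep the diagnostics closest to the cursor whose combined payload size fits
/// within `max_bytes`; the boolean reports whether anything was dropped.
fn nearby_diagnostics(
    cursor: usize,
    mut diagnostics: Vec<(Range<usize>, String)>,
    max_bytes: usize,
) -> (Vec<(Range<usize>, String)>, bool) {
    diagnostics.sort_by_key(|(range, _)| distance(cursor, range));

    let mut kept = Vec::new();
    let mut bytes = 0;
    for (range, message) in diagnostics {
        bytes += message.len();
        if bytes > max_bytes {
            return (kept, true);
        }
        kept.push((range, message));
    }
    (kept, false)
}

fn main() {
    let diagnostics = vec![
        (100..110, "far away".to_string()),
        (8..12, "right at the cursor".to_string()),
    ];
    let (kept, truncated) = nearby_diagnostics(10, diagnostics, 24);
    assert_eq!(kept.len(), 1);
    assert_eq!(kept[0].0, 8..12);
    assert!(truncated);
}
```
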
referenced_declarations, - included_files: vec![], - signatures, - excerpt_parent, - events, - can_collect_data, - diagnostic_groups, - diagnostic_groups_truncated, - git_info, - debug_info, - prompt_max_bytes, - prompt_format, - } -} - -fn add_signature( - declaration_id: DeclarationId, - declaration_to_signature_index: &mut HashMap, - signatures: &mut Vec, - index: &SyntaxIndexState, -) -> Option { - if let Some(signature_index) = declaration_to_signature_index.get(&declaration_id) { - return Some(*signature_index); - } - let Some(parent_declaration) = index.declaration(declaration_id) else { - log::error!("bug: missing parent declaration"); - return None; - }; - let parent_index = parent_declaration.parent().and_then(|parent| { - add_signature(parent, declaration_to_signature_index, signatures, index) - }); - let (text, text_is_truncated) = parent_declaration.signature_text(); - let signature_index = signatures.len(); - signatures.push(Signature { - text: text.into(), - text_is_truncated, - parent_index, - range: parent_declaration.signature_line_range(), - }); - declaration_to_signature_index.insert(declaration_id, signature_index); - Some(signature_index) -} - -#[cfg(test)] -mod tests { - use std::{path::Path, sync::Arc}; - - use client::UserStore; - use clock::FakeSystemClock; - use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery}; - use futures::{ - AsyncReadExt, StreamExt, - channel::{mpsc, oneshot}, - }; - use gpui::{ - Entity, TestAppContext, - http_client::{FakeHttpClient, Response}, - prelude::*, - }; - use indoc::indoc; - use language::OffsetRangeExt as _; - use open_ai::Usage; - use pretty_assertions::{assert_eq, assert_matches}; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - use uuid::Uuid; - - use crate::{BufferEditPrediction, Zeta}; - - #[gpui::test] - async fn test_current_state(cx: &mut TestAppContext) { - let (zeta, mut req_rx) = init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "1.txt": "Hello!\nHow\nBye\n", - "2.txt": "Hola!\nComo\nAdios\n" - }), - ) - .await; - let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; - - zeta.update(cx, |zeta, cx| { - zeta.register_project(&project, cx); - }); - - let buffer1 = project - .update(cx, |project, cx| { - let path = project.find_project_path(path!("root/1.txt"), cx).unwrap(); - project.open_buffer(path, cx) - }) - .await - .unwrap(); - let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot()); - let position = snapshot1.anchor_before(language::Point::new(1, 3)); - - // Prediction for current file - - let prediction_task = zeta.update(cx, |zeta, cx| { - zeta.refresh_prediction(&project, &buffer1, position, cx) - }); - let (_request, respond_tx) = req_rx.next().await.unwrap(); - - respond_tx - .send(model_response(indoc! {r" - --- a/root/1.txt - +++ b/root/1.txt - @@ ... @@ - Hello! - -How - +How are you? - Bye - "})) - .unwrap(); - prediction_task.await.unwrap(); - - zeta.read_with(cx, |zeta, cx| { - let prediction = zeta - .current_prediction_for_buffer(&buffer1, &project, cx) - .unwrap(); - assert_matches!(prediction, BufferEditPrediction::Local { .. 
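
`add_signature` above flattens a declaration's ancestor chain into the flat `signatures` array, deduplicating shared parents through a memo map so each signature is emitted once and children refer to it by index. The same idea as a dependency-free sketch over integer ids (types and names are illustrative):

```rust
use std::collections::HashMap;

struct Declaration {
    parent: Option<usize>,
    signature: String,
}

struct Signature {
    text: String,
    parent_index: Option<usize>,
}

/// Ensure `id` and all of its ancestors are present in `signatures`, reusing
/// entries recorded in `memo`, and return the index for `id`.
fn add_signature(
    id: usize,
    declarations: &[Declaration],
    memo: &mut HashMap<usize, usize>,
    signatures: &mut Vec<Signature>,
) -> usize {
    if let Some(&index) = memo.get(&id) {
        return index;
    }
    let declaration = &declarations[id];
    let parent_index = declaration
        .parent
        .map(|parent| add_signature(parent, declarations, memo, signatures));
    let index = signatures.len();
    signatures.push(Signature {
        text: declaration.signature.clone(),
        parent_index,
    });
    memo.insert(id, index);
    index
}

fn main() {
    // 0 = module, 1 and 2 are both children of the module.
    let declarations = vec![
        Declaration { parent: None, signature: "mod util".into() },
        Declaration { parent: Some(0), signature: "fn parse()".into() },
        Declaration { parent: Some(0), signature: "fn render()".into() },
    ];
    let mut memo = HashMap::new();
    let mut signatures = Vec::new();
    add_signature(1, &declarations, &mut memo, &mut signatures);
    add_signature(2, &declarations, &mut memo, &mut signatures);
    // The shared parent is only emitted once.
    assert_eq!(signatures.len(), 3);
    assert_eq!(signatures[1].parent_index, Some(0));
    assert_eq!(signatures[2].parent_index, Some(0));
}
```
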
}); - }); - - // Context refresh - let refresh_task = zeta.update(cx, |zeta, cx| { - zeta.refresh_context(project.clone(), buffer1.clone(), position, cx) - }); - let (_request, respond_tx) = req_rx.next().await.unwrap(); - respond_tx - .send(open_ai::Response { - id: Uuid::new_v4().to_string(), - object: "response".into(), - created: 0, - model: "model".into(), - choices: vec![open_ai::Choice { - index: 0, - message: open_ai::RequestMessage::Assistant { - content: None, - tool_calls: vec![open_ai::ToolCall { - id: "search".into(), - content: open_ai::ToolCallContent::Function { - function: open_ai::FunctionContent { - name: cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME - .to_string(), - arguments: serde_json::to_string(&SearchToolInput { - queries: Box::new([SearchToolQuery { - glob: "root/2.txt".to_string(), - syntax_node: vec![], - content: Some(".".into()), - }]), - }) - .unwrap(), - }, - }, - }], - }, - finish_reason: None, - }], - usage: Usage { - prompt_tokens: 0, - completion_tokens: 0, - total_tokens: 0, - }, - }) - .unwrap(); - refresh_task.await.unwrap(); - - zeta.update(cx, |zeta, _cx| { - zeta.discard_current_prediction(&project); - }); - - // Prediction for another file - let prediction_task = zeta.update(cx, |zeta, cx| { - zeta.refresh_prediction(&project, &buffer1, position, cx) - }); - let (_request, respond_tx) = req_rx.next().await.unwrap(); - respond_tx - .send(model_response(indoc! {r#" - --- a/root/2.txt - +++ b/root/2.txt - Hola! - -Como - +Como estas? - Adios - "#})) - .unwrap(); - prediction_task.await.unwrap(); - zeta.read_with(cx, |zeta, cx| { - let prediction = zeta - .current_prediction_for_buffer(&buffer1, &project, cx) - .unwrap(); - assert_matches!( - prediction, - BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt")) - ); - }); - - let buffer2 = project - .update(cx, |project, cx| { - let path = project.find_project_path(path!("root/2.txt"), cx).unwrap(); - project.open_buffer(path, cx) - }) - .await - .unwrap(); - - zeta.read_with(cx, |zeta, cx| { - let prediction = zeta - .current_prediction_for_buffer(&buffer2, &project, cx) - .unwrap(); - assert_matches!(prediction, BufferEditPrediction::Local { .. }); - }); - } - - #[gpui::test] - async fn test_simple_request(cx: &mut TestAppContext) { - let (zeta, mut req_rx) = init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "foo.md": "Hello!\nHow\nBye\n" - }), - ) - .await; - let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; - - let buffer = project - .update(cx, |project, cx| { - let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); - project.open_buffer(path, cx) - }) - .await - .unwrap(); - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let position = snapshot.anchor_before(language::Point::new(1, 3)); - - let prediction_task = zeta.update(cx, |zeta, cx| { - zeta.request_prediction(&project, &buffer, position, cx) - }); - - let (_, respond_tx) = req_rx.next().await.unwrap(); - - // TODO Put back when we have a structured request again - // assert_eq!( - // request.excerpt_path.as_ref(), - // Path::new(path!("root/foo.md")) - // ); - // assert_eq!( - // request.cursor_point, - // Point { - // line: Line(1), - // column: 3 - // } - // ); - - respond_tx - .send(model_response(indoc! { r" - --- a/root/foo.md - +++ b/root/foo.md - @@ ... @@ - Hello! - -How - +How are you? 
- Bye - "})) - .unwrap(); - - let prediction = prediction_task.await.unwrap().unwrap(); - - assert_eq!(prediction.edits.len(), 1); - assert_eq!( - prediction.edits[0].0.to_point(&snapshot).start, - language::Point::new(1, 3) - ); - assert_eq!(prediction.edits[0].1.as_ref(), " are you?"); - } - - #[gpui::test] - async fn test_request_events(cx: &mut TestAppContext) { - let (zeta, mut req_rx) = init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "foo.md": "Hello!\n\nBye\n" - }), - ) - .await; - let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; - - let buffer = project - .update(cx, |project, cx| { - let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); - project.open_buffer(path, cx) - }) - .await - .unwrap(); - - zeta.update(cx, |zeta, cx| { - zeta.register_buffer(&buffer, &project, cx); - }); - - buffer.update(cx, |buffer, cx| { - buffer.edit(vec![(7..7, "How")], None, cx); - }); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let position = snapshot.anchor_before(language::Point::new(1, 3)); - - let prediction_task = zeta.update(cx, |zeta, cx| { - zeta.request_prediction(&project, &buffer, position, cx) - }); - - let (request, respond_tx) = req_rx.next().await.unwrap(); - - let prompt = prompt_from_request(&request); - assert!( - prompt.contains(indoc! {" - --- a/root/foo.md - +++ b/root/foo.md - @@ -1,3 +1,3 @@ - Hello! - - - +How - Bye - "}), - "{prompt}" - ); - - respond_tx - .send(model_response(indoc! {r#" - --- a/root/foo.md - +++ b/root/foo.md - @@ ... @@ - Hello! - -How - +How are you? - Bye - "#})) - .unwrap(); - - let prediction = prediction_task.await.unwrap().unwrap(); - - assert_eq!(prediction.edits.len(), 1); - assert_eq!( - prediction.edits[0].0.to_point(&snapshot).start, - language::Point::new(1, 3) - ); - assert_eq!(prediction.edits[0].1.as_ref(), " are you?"); - } - - // Skipped until we start including diagnostics in prompt - // #[gpui::test] - // async fn test_request_diagnostics(cx: &mut TestAppContext) { - // let (zeta, mut req_rx) = init_test(cx); - // let fs = FakeFs::new(cx.executor()); - // fs.insert_tree( - // "/root", - // json!({ - // "foo.md": "Hello!\nBye" - // }), - // ) - // .await; - // let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; - - // let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap(); - // let diagnostic = lsp::Diagnostic { - // range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)), - // severity: Some(lsp::DiagnosticSeverity::ERROR), - // message: "\"Hello\" deprecated. 
Use \"Hi\" instead".to_string(), - // ..Default::default() - // }; - - // project.update(cx, |project, cx| { - // project.lsp_store().update(cx, |lsp_store, cx| { - // // Create some diagnostics - // lsp_store - // .update_diagnostics( - // LanguageServerId(0), - // lsp::PublishDiagnosticsParams { - // uri: path_to_buffer_uri.clone(), - // diagnostics: vec![diagnostic], - // version: None, - // }, - // None, - // language::DiagnosticSourceKind::Pushed, - // &[], - // cx, - // ) - // .unwrap(); - // }); - // }); - - // let buffer = project - // .update(cx, |project, cx| { - // let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); - // project.open_buffer(path, cx) - // }) - // .await - // .unwrap(); - - // let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - // let position = snapshot.anchor_before(language::Point::new(0, 0)); - - // let _prediction_task = zeta.update(cx, |zeta, cx| { - // zeta.request_prediction(&project, &buffer, position, cx) - // }); - - // let (request, _respond_tx) = req_rx.next().await.unwrap(); - - // assert_eq!(request.diagnostic_groups.len(), 1); - // let value = serde_json::from_str::(request.diagnostic_groups[0].0.get()) - // .unwrap(); - // // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3 - // assert_eq!( - // value, - // json!({ - // "entries": [{ - // "range": { - // "start": 8, - // "end": 10 - // }, - // "diagnostic": { - // "source": null, - // "code": null, - // "code_description": null, - // "severity": 1, - // "message": "\"Hello\" deprecated. Use \"Hi\" instead", - // "markdown": null, - // "group_id": 0, - // "is_primary": true, - // "is_disk_based": false, - // "is_unnecessary": false, - // "source_kind": "Pushed", - // "data": null, - // "underline": true - // } - // }], - // "primary_ix": 0 - // }) - // ); - // } - - fn model_response(text: &str) -> open_ai::Response { - open_ai::Response { - id: Uuid::new_v4().to_string(), - object: "response".into(), - created: 0, - model: "model".into(), - choices: vec![open_ai::Choice { - index: 0, - message: open_ai::RequestMessage::Assistant { - content: Some(open_ai::MessageContent::Plain(text.to_string())), - tool_calls: vec![], - }, - finish_reason: None, - }], - usage: Usage { - prompt_tokens: 0, - completion_tokens: 0, - total_tokens: 0, - }, - } - } - - fn prompt_from_request(request: &open_ai::Request) -> &str { - assert_eq!(request.messages.len(), 1); - let open_ai::RequestMessage::User { - content: open_ai::MessageContent::Plain(content), - .. - } = &request.messages[0] - else { - panic!( - "Request does not have single user message of type Plain. 
{:#?}", - request - ); - }; - content - } - - fn init_test( - cx: &mut TestAppContext, - ) -> ( - Entity, - mpsc::UnboundedReceiver<(open_ai::Request, oneshot::Sender)>, - ) { - cx.update(move |cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - zlog::init_test(); - - let (req_tx, req_rx) = mpsc::unbounded(); - - let http_client = FakeHttpClient::create({ - move |req| { - let uri = req.uri().path().to_string(); - let mut body = req.into_body(); - let req_tx = req_tx.clone(); - async move { - let resp = match uri.as_str() { - "/client/llm_tokens" => serde_json::to_string(&json!({ - "token": "test" - })) - .unwrap(), - "/predict_edits/raw" => { - let mut buf = Vec::new(); - body.read_to_end(&mut buf).await.ok(); - let req = serde_json::from_slice(&buf).unwrap(); - - let (res_tx, res_rx) = oneshot::channel(); - req_tx.unbounded_send((req, res_tx)).unwrap(); - serde_json::to_string(&res_rx.await?).unwrap() - } - _ => { - panic!("Unexpected path: {}", uri) - } - }; - - Ok(Response::builder().body(resp.into()).unwrap()) - } - } - }); - - let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); - client.cloud_client().set_credentials(1, "test".into()); - - language_model::init(client.clone(), cx); - - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - let zeta = Zeta::global(&client, &user_store, cx); - - (zeta, req_rx) - }) - } -} diff --git a/crates/zeta2_tools/Cargo.toml b/crates/zeta2_tools/Cargo.toml deleted file mode 100644 index 3a9b1ccbf9340dfdaa06030e59c2112b9cda6307..0000000000000000000000000000000000000000 --- a/crates/zeta2_tools/Cargo.toml +++ /dev/null @@ -1,52 +0,0 @@ -[package] -name = "zeta2_tools" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/zeta2_tools.rs" - -[dependencies] -anyhow.workspace = true -chrono.workspace = true -client.workspace = true -cloud_llm_client.workspace = true -cloud_zeta2_prompt.workspace = true -collections.workspace = true -edit_prediction_context.workspace = true -editor.workspace = true -feature_flags.workspace = true -futures.workspace = true -gpui.workspace = true -language.workspace = true -log.workspace = true -multi_buffer.workspace = true -ordered-float.workspace = true -project.workspace = true -serde.workspace = true -serde_json.workspace = true -telemetry.workspace = true -text.workspace = true -ui.workspace = true -ui_input.workspace = true -util.workspace = true -workspace.workspace = true -zeta2.workspace = true - -[dev-dependencies] -clap.workspace = true -gpui = { workspace = true, features = ["test-support"] } -indoc.workspace = true -language = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } -serde_json.workspace = true -settings = { workspace = true, features = ["test-support"] } -text = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/zeta2_tools/src/zeta2_context_view.rs b/crates/zeta2_tools/src/zeta2_context_view.rs deleted file mode 100644 index 1826bd22df6d08ce717ef9bdf0070f88ad63c433..0000000000000000000000000000000000000000 --- a/crates/zeta2_tools/src/zeta2_context_view.rs +++ /dev/null @@ -1,438 +0,0 @@ -use std::{ - any::TypeId, - collections::VecDeque, - ops::Add, - sync::Arc, - time::{Duration, Instant}, -}; - -use anyhow::Result; -use 
client::{Client, UserStore}; -use cloud_zeta2_prompt::retrieval_prompt::SearchToolQuery; -use editor::{Editor, PathKey}; -use futures::StreamExt as _; -use gpui::{ - Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle, - Focusable, ParentElement as _, SharedString, Styled as _, Task, TextAlign, Window, actions, - pulsating_between, -}; -use multi_buffer::MultiBuffer; -use project::Project; -use text::OffsetRangeExt; -use ui::{ - ButtonCommon, Clickable, Color, Disableable, FluentBuilder as _, Icon, IconButton, IconName, - IconSize, InteractiveElement, IntoElement, ListHeader, ListItem, StyledTypography, div, h_flex, - v_flex, -}; -use workspace::{Item, ItemHandle as _}; -use zeta2::{ - Zeta, ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo, - ZetaSearchQueryDebugInfo, -}; - -pub struct Zeta2ContextView { - empty_focus_handle: FocusHandle, - project: Entity, - zeta: Entity, - runs: VecDeque, - current_ix: usize, - _update_task: Task>, -} - -#[derive(Debug)] -struct RetrievalRun { - editor: Entity, - search_queries: Vec, - started_at: Instant, - search_results_generated_at: Option, - search_results_executed_at: Option, - finished_at: Option, -} - -actions!( - dev, - [ - /// Go to the previous context retrieval run - Zeta2ContextGoBack, - /// Go to the next context retrieval run - Zeta2ContextGoForward - ] -); - -impl Zeta2ContextView { - pub fn new( - project: Entity, - client: &Arc, - user_store: &Entity, - window: &mut gpui::Window, - cx: &mut Context, - ) -> Self { - let zeta = Zeta::global(client, user_store, cx); - - let mut debug_rx = zeta.update(cx, |zeta, _| zeta.debug_info()); - let _update_task = cx.spawn_in(window, async move |this, cx| { - while let Some(event) = debug_rx.next().await { - this.update_in(cx, |this, window, cx| { - this.handle_zeta_event(event, window, cx) - })?; - } - Ok(()) - }); - - Self { - empty_focus_handle: cx.focus_handle(), - project, - runs: VecDeque::new(), - current_ix: 0, - zeta, - _update_task, - } - } - - fn handle_zeta_event( - &mut self, - event: ZetaDebugInfo, - window: &mut gpui::Window, - cx: &mut Context, - ) { - match event { - ZetaDebugInfo::ContextRetrievalStarted(info) => { - if info.project == self.project { - self.handle_context_retrieval_started(info, window, cx); - } - } - ZetaDebugInfo::SearchQueriesGenerated(info) => { - if info.project == self.project { - self.handle_search_queries_generated(info, window, cx); - } - } - ZetaDebugInfo::SearchQueriesExecuted(info) => { - if info.project == self.project { - self.handle_search_queries_executed(info, window, cx); - } - } - ZetaDebugInfo::ContextRetrievalFinished(info) => { - if info.project == self.project { - self.handle_context_retrieval_finished(info, window, cx); - } - } - ZetaDebugInfo::EditPredictionRequested(_) => {} - } - } - - fn handle_context_retrieval_started( - &mut self, - info: ZetaContextRetrievalStartedDebugInfo, - window: &mut Window, - cx: &mut Context, - ) { - if self - .runs - .back() - .is_some_and(|run| run.search_results_executed_at.is_none()) - { - self.runs.pop_back(); - } - - let multibuffer = cx.new(|_| MultiBuffer::new(language::Capability::ReadOnly)); - let editor = cx - .new(|cx| Editor::for_multibuffer(multibuffer, Some(self.project.clone()), window, cx)); - - if self.runs.len() == 32 { - self.runs.pop_front(); - } - - self.runs.push_back(RetrievalRun { - editor, - search_queries: Vec::new(), - started_at: info.timestamp, - search_results_generated_at: None, - 
search_results_executed_at: None, - finished_at: None, - }); - - cx.notify(); - } - - fn handle_context_retrieval_finished( - &mut self, - info: ZetaContextRetrievalDebugInfo, - window: &mut Window, - cx: &mut Context, - ) { - let Some(run) = self.runs.back_mut() else { - return; - }; - - run.finished_at = Some(info.timestamp); - - let multibuffer = run.editor.read(cx).buffer().clone(); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.clear(cx); - - let context = self.zeta.read(cx).context_for_project(&self.project); - let mut paths = Vec::new(); - for (buffer, ranges) in context { - let path = PathKey::for_buffer(&buffer, cx); - let snapshot = buffer.read(cx).snapshot(); - let ranges = ranges - .iter() - .map(|range| range.to_point(&snapshot)) - .collect::>(); - paths.push((path, buffer, ranges)); - } - - for (path, buffer, ranges) in paths { - multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); - } - }); - - run.editor.update(cx, |editor, cx| { - editor.move_to_beginning(&Default::default(), window, cx); - }); - - cx.notify(); - } - - fn handle_search_queries_generated( - &mut self, - info: ZetaSearchQueryDebugInfo, - _window: &mut Window, - cx: &mut Context, - ) { - let Some(run) = self.runs.back_mut() else { - return; - }; - - run.search_results_generated_at = Some(info.timestamp); - run.search_queries = info.search_queries; - cx.notify(); - } - - fn handle_search_queries_executed( - &mut self, - info: ZetaContextRetrievalDebugInfo, - _window: &mut Window, - cx: &mut Context, - ) { - if self.current_ix + 2 == self.runs.len() { - // Switch to latest when the queries are executed - self.current_ix += 1; - } - - let Some(run) = self.runs.back_mut() else { - return; - }; - - run.search_results_executed_at = Some(info.timestamp); - cx.notify(); - } - - fn handle_go_back( - &mut self, - _: &Zeta2ContextGoBack, - window: &mut Window, - cx: &mut Context, - ) { - self.current_ix = self.current_ix.saturating_sub(1); - cx.focus_self(window); - cx.notify(); - } - - fn handle_go_forward( - &mut self, - _: &Zeta2ContextGoForward, - window: &mut Window, - cx: &mut Context, - ) { - self.current_ix = self - .current_ix - .add(1) - .min(self.runs.len().saturating_sub(1)); - cx.focus_self(window); - cx.notify(); - } - - fn render_informational_footer(&self, cx: &mut Context<'_, Zeta2ContextView>) -> ui::Div { - let is_latest = self.runs.len() == self.current_ix + 1; - let run = &self.runs[self.current_ix]; - - h_flex() - .p_2() - .w_full() - .font_buffer(cx) - .text_xs() - .border_t_1() - .gap_2() - .child( - v_flex().h_full().flex_1().children( - run.search_queries - .iter() - .enumerate() - .flat_map(|(ix, query)| { - std::iter::once(ListHeader::new(query.glob.clone()).into_any_element()) - .chain(query.syntax_node.iter().enumerate().map( - move |(regex_ix, regex)| { - ListItem::new(ix * 100 + regex_ix) - .start_slot( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(regex.clone()) - .into_any_element() - }, - )) - .chain(query.content.as_ref().map(move |regex| { - ListItem::new(ix * 100 + query.syntax_node.len()) - .start_slot( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(regex.clone()) - .into_any_element() - })) - }), - ), - ) - .child( - v_flex() - .h_full() - .text_align(TextAlign::Right) - .child( - h_flex() - .justify_end() - .child( - IconButton::new("go-back", IconName::ChevronLeft) - .disabled(self.current_ix == 0 || self.runs.len() < 2) - 
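
The debug view above keeps a capped queue of retrieval runs plus an index that is stepped backward and forward with saturating and clamped arithmetic. A condensed, UI-free sketch of that bookkeeping (names are illustrative; the real view also auto-advances to the newest run once its searches finish):

```rust
use std::collections::VecDeque;

const MAX_RUNS: usize = 32;

struct RunHistory<T> {
    runs: VecDeque<T>,
    current: usize,
}

impl<T> RunHistory<T> {
    fn new() -> Self {
        Self { runs: VecDeque::new(), current: 0 }
    }

    /// Record a new run, dropping the oldest one once the cap is reached.
    fn push(&mut self, run: T) {
        if self.runs.len() == MAX_RUNS {
            self.runs.pop_front();
            self.current = self.current.saturating_sub(1);
        }
        self.runs.push_back(run);
    }

    fn go_back(&mut self) {
        self.current = self.current.saturating_sub(1);
    }

    fn go_forward(&mut self) {
        self.current = (self.current + 1).min(self.runs.len().saturating_sub(1));
    }

    fn current(&self) -> Option<&T> {
        self.runs.get(self.current)
    }
}

fn main() {
    let mut history = RunHistory::new();
    history.push("run 1");
    history.push("run 2");
    history.go_forward();
    assert_eq!(history.current(), Some(&"run 2"));
    history.go_back();
    assert_eq!(history.current(), Some(&"run 1"));
    history.go_back(); // already at the oldest run; stays there
    assert_eq!(history.current(), Some(&"run 1"));
}
```
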
.tooltip(ui::Tooltip::for_action_title( - "Go to previous run", - &Zeta2ContextGoBack, - )) - .on_click(cx.listener(|this, _, window, cx| { - this.handle_go_back(&Zeta2ContextGoBack, window, cx); - })), - ) - .child( - div() - .child(format!("{}/{}", self.current_ix + 1, self.runs.len())) - .map(|this| { - if self.runs.back().is_some_and(|back| { - back.search_results_executed_at.is_none() - }) { - this.with_animation( - "pulsating-count", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.opacity(delta), - ) - .into_any_element() - } else { - this.into_any_element() - } - }), - ) - .child( - IconButton::new("go-forward", IconName::ChevronRight) - .disabled(self.current_ix + 1 == self.runs.len()) - .tooltip(ui::Tooltip::for_action_title( - "Go to next run", - &Zeta2ContextGoBack, - )) - .on_click(cx.listener(|this, _, window, cx| { - this.handle_go_forward(&Zeta2ContextGoForward, window, cx); - })), - ), - ) - .map(|mut div| { - let pending_message = |div: ui::Div, msg: &'static str| { - if is_latest { - return div.child(msg); - } else { - return div.child("Canceled"); - } - }; - - let t0 = run.started_at; - let Some(t1) = run.search_results_generated_at else { - return pending_message(div, "Planning search..."); - }; - div = div.child(format!("Planned search: {:>5} ms", (t1 - t0).as_millis())); - - let Some(t2) = run.search_results_executed_at else { - return pending_message(div, "Running search..."); - }; - div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis())); - - div.child(format!( - "Total: {:>5} ms", - (run.finished_at.unwrap_or(t0) - t0).as_millis() - )) - }), - ) - } -} - -impl Focusable for Zeta2ContextView { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.runs - .get(self.current_ix) - .map(|run| run.editor.read(cx).focus_handle(cx)) - .unwrap_or_else(|| self.empty_focus_handle.clone()) - } -} - -impl EventEmitter<()> for Zeta2ContextView {} - -impl Item for Zeta2ContextView { - type Event = (); - - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Edit Prediction Context".into() - } - - fn buffer_kind(&self, _cx: &App) -> workspace::item::ItemBufferKind { - workspace::item::ItemBufferKind::Multibuffer - } - - fn act_as_type<'a>( - &'a self, - type_id: TypeId, - self_handle: &'a Entity, - _: &'a App, - ) -> Option { - if type_id == TypeId::of::() { - Some(self_handle.to_any()) - } else if type_id == TypeId::of::() { - Some(self.runs.get(self.current_ix)?.editor.to_any()) - } else { - None - } - } -} - -impl gpui::Render for Zeta2ContextView { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { - v_flex() - .key_context("Zeta2Context") - .on_action(cx.listener(Self::handle_go_back)) - .on_action(cx.listener(Self::handle_go_forward)) - .size_full() - .map(|this| { - if self.runs.is_empty() { - this.child( - v_flex() - .size_full() - .justify_center() - .items_center() - .child("No retrieval runs yet"), - ) - } else { - this.child(self.runs[self.current_ix].editor.clone()) - .child(self.render_informational_footer(cx)) - } - }) - } -} diff --git a/crates/zeta2_tools/src/zeta2_tools.rs b/crates/zeta2_tools/src/zeta2_tools.rs deleted file mode 100644 index 756fff5d621a85f7936a980d71f68c87098c4539..0000000000000000000000000000000000000000 --- a/crates/zeta2_tools/src/zeta2_tools.rs +++ /dev/null @@ -1,1301 +0,0 @@ -mod zeta2_context_view; - -use std::{cmp::Reverse, path::PathBuf, str::FromStr, sync::Arc, time::Duration}; - -use 
chrono::TimeDelta; -use client::{Client, UserStore}; -use cloud_llm_client::predict_edits_v3::{ - DeclarationScoreComponents, PredictEditsRequest, PromptFormat, -}; -use collections::HashMap; -use editor::{Editor, EditorEvent, EditorMode, ExcerptRange, MultiBuffer}; -use feature_flags::FeatureFlagAppExt as _; -use futures::{FutureExt, StreamExt as _, channel::oneshot, future::Shared}; -use gpui::{ - CursorStyle, Empty, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, - WeakEntity, actions, prelude::*, -}; -use language::{Buffer, DiskState}; -use ordered_float::OrderedFloat; -use project::{Project, WorktreeId, telemetry_snapshot::TelemetrySnapshot}; -use ui::{ButtonLike, ContextMenu, ContextMenuEntry, DropdownMenu, KeyBinding, prelude::*}; -use ui_input::InputField; -use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; -use workspace::{Item, SplitDirection, Workspace}; -use zeta2::{ - AgenticContextOptions, ContextMode, DEFAULT_SYNTAX_CONTEXT_OPTIONS, Zeta, Zeta2FeatureFlag, - ZetaDebugInfo, ZetaEditPredictionDebugInfo, ZetaOptions, -}; - -use edit_prediction_context::{EditPredictionContextOptions, EditPredictionExcerptOptions}; -use zeta2_context_view::Zeta2ContextView; - -actions!( - dev, - [ - /// Opens the edit prediction context view. - OpenZeta2ContextView, - /// Opens the edit prediction inspector. - OpenZeta2Inspector, - /// Rate prediction as positive. - Zeta2RatePredictionPositive, - /// Rate prediction as negative. - Zeta2RatePredictionNegative, - ] -); - -pub fn init(cx: &mut App) { - cx.observe_new(move |workspace: &mut Workspace, _, _cx| { - workspace.register_action(move |workspace, _: &OpenZeta2Inspector, window, cx| { - let project = workspace.project(); - workspace.split_item( - SplitDirection::Right, - Box::new(cx.new(|cx| { - Zeta2Inspector::new( - &project, - workspace.client(), - workspace.user_store(), - window, - cx, - ) - })), - window, - cx, - ); - }); - }) - .detach(); - - cx.observe_new(move |workspace: &mut Workspace, _, _cx| { - workspace.register_action(move |workspace, _: &OpenZeta2ContextView, window, cx| { - let project = workspace.project(); - workspace.split_item( - SplitDirection::Right, - Box::new(cx.new(|cx| { - Zeta2ContextView::new( - project.clone(), - workspace.client(), - workspace.user_store(), - window, - cx, - ) - })), - window, - cx, - ); - }); - }) - .detach(); -} - -// TODO show included diagnostics, and events - -pub struct Zeta2Inspector { - focus_handle: FocusHandle, - project: Entity, - last_prediction: Option, - max_excerpt_bytes_input: Entity, - min_excerpt_bytes_input: Entity, - cursor_context_ratio_input: Entity, - max_prompt_bytes_input: Entity, - context_mode: ContextModeState, - active_view: ActiveView, - zeta: Entity, - _active_editor_subscription: Option, - _update_state_task: Task<()>, - _receive_task: Task<()>, -} - -pub enum ContextModeState { - Llm, - Syntax { - max_retrieved_declarations: Entity, - }, -} - -#[derive(PartialEq)] -enum ActiveView { - Context, - Inference, -} - -struct LastPrediction { - context_editor: Entity, - prompt_editor: Entity, - retrieval_time: TimeDelta, - request_time: Option, - buffer: WeakEntity, - position: language::Anchor, - state: LastPredictionState, - request: PredictEditsRequest, - project_snapshot: Shared>>, - _task: Option>, -} - -#[derive(Clone, Copy, PartialEq)] -enum Feedback { - Positive, - Negative, -} - -enum LastPredictionState { - Requested, - Success { - model_response_editor: Entity, - feedback_editor: Entity, - feedback: Option, - request_id: String, - 
}, - Failed { - message: String, - }, -} - -impl Zeta2Inspector { - pub fn new( - project: &Entity, - client: &Arc, - user_store: &Entity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let zeta = Zeta::global(client, user_store, cx); - let mut request_rx = zeta.update(cx, |zeta, _cx| zeta.debug_info()); - - let receive_task = cx.spawn_in(window, async move |this, cx| { - while let Some(prediction) = request_rx.next().await { - this.update_in(cx, |this, window, cx| { - this.update_last_prediction(prediction, window, cx) - }) - .ok(); - } - }); - - let mut this = Self { - focus_handle: cx.focus_handle(), - project: project.clone(), - last_prediction: None, - active_view: ActiveView::Inference, - max_excerpt_bytes_input: Self::number_input("Max Excerpt Bytes", window, cx), - min_excerpt_bytes_input: Self::number_input("Min Excerpt Bytes", window, cx), - cursor_context_ratio_input: Self::number_input("Cursor Context Ratio", window, cx), - max_prompt_bytes_input: Self::number_input("Max Prompt Bytes", window, cx), - context_mode: ContextModeState::Llm, - zeta: zeta.clone(), - _active_editor_subscription: None, - _update_state_task: Task::ready(()), - _receive_task: receive_task, - }; - this.set_options_state(&zeta.read(cx).options().clone(), window, cx); - this - } - - fn set_options_state( - &mut self, - options: &ZetaOptions, - window: &mut Window, - cx: &mut Context, - ) { - let excerpt_options = options.context.excerpt(); - self.max_excerpt_bytes_input.update(cx, |input, cx| { - input.set_text(excerpt_options.max_bytes.to_string(), window, cx); - }); - self.min_excerpt_bytes_input.update(cx, |input, cx| { - input.set_text(excerpt_options.min_bytes.to_string(), window, cx); - }); - self.cursor_context_ratio_input.update(cx, |input, cx| { - input.set_text( - format!( - "{:.2}", - excerpt_options.target_before_cursor_over_total_bytes - ), - window, - cx, - ); - }); - self.max_prompt_bytes_input.update(cx, |input, cx| { - input.set_text(options.max_prompt_bytes.to_string(), window, cx); - }); - - match &options.context { - ContextMode::Agentic(_) => { - self.context_mode = ContextModeState::Llm; - } - ContextMode::Syntax(_) => { - self.context_mode = ContextModeState::Syntax { - max_retrieved_declarations: Self::number_input( - "Max Retrieved Definitions", - window, - cx, - ), - }; - } - } - cx.notify(); - } - - fn set_zeta_options(&mut self, options: ZetaOptions, cx: &mut Context) { - self.zeta.update(cx, |this, _cx| this.set_options(options)); - - const DEBOUNCE_TIME: Duration = Duration::from_millis(100); - - if let Some(prediction) = self.last_prediction.as_mut() { - if let Some(buffer) = prediction.buffer.upgrade() { - let position = prediction.position; - let zeta = self.zeta.clone(); - let project = self.project.clone(); - prediction._task = Some(cx.spawn(async move |_this, cx| { - cx.background_executor().timer(DEBOUNCE_TIME).await; - if let Some(task) = zeta - .update(cx, |zeta, cx| { - zeta.refresh_prediction(&project, &buffer, position, cx) - }) - .ok() - { - task.await.log_err(); - } - })); - prediction.state = LastPredictionState::Requested; - } else { - self.last_prediction.take(); - } - } - - cx.notify(); - } - - fn number_input( - label: &'static str, - window: &mut Window, - cx: &mut Context, - ) -> Entity { - let input = cx.new(|cx| { - InputField::new(window, cx, "") - .label(label) - .label_min_width(px(64.)) - }); - - cx.subscribe_in( - &input.read(cx).editor().clone(), - window, - |this, _, event, _window, cx| { - let EditorEvent::BufferEdited = event else { - 
return; - }; - - fn number_input_value( - input: &Entity, - cx: &App, - ) -> T { - input - .read(cx) - .editor() - .read(cx) - .text(cx) - .parse::() - .unwrap_or_default() - } - - let zeta_options = this.zeta.read(cx).options().clone(); - - let excerpt_options = EditPredictionExcerptOptions { - max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx), - min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx), - target_before_cursor_over_total_bytes: number_input_value( - &this.cursor_context_ratio_input, - cx, - ), - }; - - let context = match zeta_options.context { - ContextMode::Agentic(_context_options) => { - ContextMode::Agentic(AgenticContextOptions { - excerpt: excerpt_options, - }) - } - ContextMode::Syntax(context_options) => { - let max_retrieved_declarations = match &this.context_mode { - ContextModeState::Llm => { - zeta2::DEFAULT_SYNTAX_CONTEXT_OPTIONS.max_retrieved_declarations - } - ContextModeState::Syntax { - max_retrieved_declarations, - } => number_input_value(max_retrieved_declarations, cx), - }; - - ContextMode::Syntax(EditPredictionContextOptions { - excerpt: excerpt_options, - max_retrieved_declarations, - ..context_options - }) - } - }; - - this.set_zeta_options( - ZetaOptions { - context, - max_prompt_bytes: number_input_value(&this.max_prompt_bytes_input, cx), - max_diagnostic_bytes: zeta_options.max_diagnostic_bytes, - prompt_format: zeta_options.prompt_format, - file_indexing_parallelism: zeta_options.file_indexing_parallelism, - buffer_change_grouping_interval: zeta_options - .buffer_change_grouping_interval, - }, - cx, - ); - }, - ) - .detach(); - input - } - - fn update_last_prediction( - &mut self, - prediction: zeta2::ZetaDebugInfo, - window: &mut Window, - cx: &mut Context, - ) { - let project = self.project.read(cx); - let path_style = project.path_style(cx); - let Some(worktree_id) = project - .worktrees(cx) - .next() - .map(|worktree| worktree.read(cx).id()) - else { - log::error!("Open a worktree to use edit prediction debug view"); - self.last_prediction.take(); - return; - }; - - self._update_state_task = cx.spawn_in(window, { - let language_registry = self.project.read(cx).languages().clone(); - async move |this, cx| { - let mut languages = HashMap::default(); - let ZetaDebugInfo::EditPredictionRequested(prediction) = prediction else { - return; - }; - for ext in prediction - .request - .referenced_declarations - .iter() - .filter_map(|snippet| snippet.path.extension()) - .chain(prediction.request.excerpt_path.extension()) - { - if !languages.contains_key(ext) { - // Most snippets are gonna be the same language, - // so we think it's fine to do this sequentially for now - languages.insert( - ext.to_owned(), - language_registry - .language_for_name_or_extension(&ext.to_string_lossy()) - .await - .ok(), - ); - } - } - - let markdown_language = language_registry - .language_for_name("Markdown") - .await - .log_err(); - - let json_language = language_registry.language_for_name("Json").await.log_err(); - - this.update_in(cx, |this, window, cx| { - let context_editor = cx.new(|cx| { - let mut excerpt_score_components = HashMap::default(); - - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(language::Capability::ReadOnly); - let excerpt_file = Arc::new(ExcerptMetadataFile { - title: RelPath::unix("Cursor Excerpt").unwrap().into(), - path_style, - worktree_id, - }); - - let excerpt_buffer = cx.new(|cx| { - let mut buffer = - Buffer::local(prediction.request.excerpt.clone(), cx); - if let Some(language) = prediction 
- .request - .excerpt_path - .extension() - .and_then(|ext| languages.get(ext)) - { - buffer.set_language(language.clone(), cx); - } - buffer.file_updated(excerpt_file, cx); - buffer - }); - - multibuffer.push_excerpts( - excerpt_buffer, - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], - cx, - ); - - let mut declarations = - prediction.request.referenced_declarations.clone(); - declarations.sort_unstable_by_key(|declaration| { - Reverse(OrderedFloat(declaration.declaration_score)) - }); - - for snippet in &declarations { - let snippet_file = Arc::new(ExcerptMetadataFile { - title: RelPath::unix(&format!( - "{} (Score: {})", - snippet.path.display(), - snippet.declaration_score - )) - .unwrap() - .into(), - path_style, - worktree_id, - }); - - let excerpt_buffer = cx.new(|cx| { - let mut buffer = Buffer::local(snippet.text.clone(), cx); - buffer.file_updated(snippet_file, cx); - if let Some(ext) = snippet.path.extension() - && let Some(language) = languages.get(ext) - { - buffer.set_language(language.clone(), cx); - } - buffer - }); - - let excerpt_ids = multibuffer.push_excerpts( - excerpt_buffer, - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], - cx, - ); - let excerpt_id = excerpt_ids.first().unwrap(); - - excerpt_score_components - .insert(*excerpt_id, snippet.score_components.clone()); - } - - multibuffer - }); - - let mut editor = - Editor::new(EditorMode::full(), multibuffer, None, window, cx); - editor.register_addon(ZetaContextAddon { - excerpt_score_components, - }); - editor - }); - - let ZetaEditPredictionDebugInfo { - response_rx, - position, - buffer, - retrieval_time, - local_prompt, - .. - } = prediction; - - let task = cx.spawn_in(window, { - let markdown_language = markdown_language.clone(); - let json_language = json_language.clone(); - async move |this, cx| { - let response = response_rx.await; - - this.update_in(cx, |this, window, cx| { - if let Some(prediction) = this.last_prediction.as_mut() { - prediction.state = match response { - Ok((Ok(response), request_time)) => { - prediction.request_time = Some(request_time); - - let feedback_editor = cx.new(|cx| { - let buffer = cx.new(|cx| { - let mut buffer = Buffer::local("", cx); - buffer.set_language( - markdown_language.clone(), - cx, - ); - buffer - }); - let buffer = - cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - let mut editor = Editor::new( - EditorMode::AutoHeight { - min_lines: 3, - max_lines: None, - }, - buffer, - None, - window, - cx, - ); - editor.set_placeholder_text( - "Write feedback here", - window, - cx, - ); - editor.set_show_line_numbers(false, cx); - editor.set_show_gutter(false, cx); - editor.set_show_scrollbars(false, cx); - editor - }); - - cx.subscribe_in( - &feedback_editor, - window, - |this, editor, ev, window, cx| match ev { - EditorEvent::BufferEdited => { - if let Some(last_prediction) = - this.last_prediction.as_mut() - && let LastPredictionState::Success { - feedback: feedback_state, - .. 
- } = &mut last_prediction.state - { - if feedback_state.take().is_some() { - editor.update(cx, |editor, cx| { - editor.set_placeholder_text( - "Write feedback here", - window, - cx, - ); - }); - cx.notify(); - } - } - } - _ => {} - }, - ) - .detach(); - - LastPredictionState::Success { - model_response_editor: cx.new(|cx| { - let buffer = cx.new(|cx| { - let mut buffer = Buffer::local( - serde_json::to_string_pretty(&response) - .unwrap_or_default(), - cx, - ); - buffer.set_language(json_language, cx); - buffer - }); - let buffer = cx.new(|cx| { - MultiBuffer::singleton(buffer, cx) - }); - let mut editor = Editor::new( - EditorMode::full(), - buffer, - None, - window, - cx, - ); - editor.set_read_only(true); - editor.set_show_line_numbers(false, cx); - editor.set_show_gutter(false, cx); - editor.set_show_scrollbars(false, cx); - editor - }), - feedback_editor, - feedback: None, - request_id: response.id.clone(), - } - } - Ok((Err(err), request_time)) => { - prediction.request_time = Some(request_time); - LastPredictionState::Failed { message: err } - } - Err(oneshot::Canceled) => LastPredictionState::Failed { - message: "Canceled".to_string(), - }, - }; - } - }) - .ok(); - } - }); - - let project_snapshot_task = TelemetrySnapshot::new(&this.project, cx); - - this.last_prediction = Some(LastPrediction { - context_editor, - prompt_editor: cx.new(|cx| { - let buffer = cx.new(|cx| { - let mut buffer = - Buffer::local(local_prompt.unwrap_or_else(|err| err), cx); - buffer.set_language(markdown_language.clone(), cx); - buffer - }); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - let mut editor = - Editor::new(EditorMode::full(), buffer, None, window, cx); - editor.set_read_only(true); - editor.set_show_line_numbers(false, cx); - editor.set_show_gutter(false, cx); - editor.set_show_scrollbars(false, cx); - editor - }), - retrieval_time, - request_time: None, - buffer, - position, - state: LastPredictionState::Requested, - project_snapshot: cx - .foreground_executor() - .spawn(async move { Arc::new(project_snapshot_task.await) }) - .shared(), - request: prediction.request, - _task: Some(task), - }); - cx.notify(); - }) - .ok(); - } - }); - } - - fn handle_rate_positive( - &mut self, - _action: &Zeta2RatePredictionPositive, - window: &mut Window, - cx: &mut Context, - ) { - self.handle_rate(Feedback::Positive, window, cx); - } - - fn handle_rate_negative( - &mut self, - _action: &Zeta2RatePredictionNegative, - window: &mut Window, - cx: &mut Context, - ) { - self.handle_rate(Feedback::Negative, window, cx); - } - - fn handle_rate(&mut self, kind: Feedback, window: &mut Window, cx: &mut Context) { - let Some(last_prediction) = self.last_prediction.as_mut() else { - return; - }; - if !last_prediction.request.can_collect_data { - return; - } - - let project_snapshot_task = last_prediction.project_snapshot.clone(); - - cx.spawn_in(window, async move |this, cx| { - let project_snapshot = project_snapshot_task.await; - this.update_in(cx, |this, window, cx| { - let Some(last_prediction) = this.last_prediction.as_mut() else { - return; - }; - - let LastPredictionState::Success { - feedback: feedback_state, - feedback_editor, - model_response_editor, - request_id, - .. - } = &mut last_prediction.state - else { - return; - }; - - *feedback_state = Some(kind); - let text = feedback_editor.update(cx, |feedback_editor, cx| { - feedback_editor.set_placeholder_text( - "Submitted. 
Edit or submit again to change.", - window, - cx, - ); - feedback_editor.text(cx) - }); - cx.notify(); - - cx.defer_in(window, { - let model_response_editor = model_response_editor.downgrade(); - move |_, window, cx| { - if let Some(model_response_editor) = model_response_editor.upgrade() { - model_response_editor.focus_handle(cx).focus(window); - } - } - }); - - let kind = match kind { - Feedback::Positive => "positive", - Feedback::Negative => "negative", - }; - - telemetry::event!( - "Zeta2 Prediction Rated", - id = request_id, - kind = kind, - text = text, - request = last_prediction.request, - project_snapshot = project_snapshot, - ); - }) - .log_err(); - }) - .detach(); - } - - fn focus_feedback(&mut self, window: &mut Window, cx: &mut Context) { - if let Some(last_prediction) = self.last_prediction.as_mut() { - if let LastPredictionState::Success { - feedback_editor, .. - } = &mut last_prediction.state - { - feedback_editor.focus_handle(cx).focus(window); - } - }; - } - - fn render_options(&self, window: &mut Window, cx: &mut Context) -> Div { - v_flex() - .gap_2() - .child( - h_flex() - .child(Headline::new("Options").size(HeadlineSize::Small)) - .justify_between() - .child( - ui::Button::new("reset-options", "Reset") - .disabled(self.zeta.read(cx).options() == &zeta2::DEFAULT_OPTIONS) - .style(ButtonStyle::Outlined) - .size(ButtonSize::Large) - .on_click(cx.listener(|this, _, window, cx| { - this.set_options_state(&zeta2::DEFAULT_OPTIONS, window, cx); - })), - ), - ) - .child( - v_flex() - .gap_2() - .child( - h_flex() - .gap_2() - .items_end() - .child(self.max_excerpt_bytes_input.clone()) - .child(self.min_excerpt_bytes_input.clone()) - .child(self.cursor_context_ratio_input.clone()) - .child(self.render_context_mode_dropdown(window, cx)), - ) - .child( - h_flex() - .gap_2() - .items_end() - .children(match &self.context_mode { - ContextModeState::Llm => None, - ContextModeState::Syntax { - max_retrieved_declarations, - } => Some(max_retrieved_declarations.clone()), - }) - .child(self.max_prompt_bytes_input.clone()) - .child(self.render_prompt_format_dropdown(window, cx)), - ), - ) - } - - fn render_context_mode_dropdown(&self, window: &mut Window, cx: &mut Context) -> Div { - let this = cx.weak_entity(); - - v_flex() - .gap_1p5() - .child( - Label::new("Context Mode") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child( - DropdownMenu::new( - "ep-ctx-mode", - match &self.context_mode { - ContextModeState::Llm => "LLM-based", - ContextModeState::Syntax { .. } => "Syntax", - }, - ContextMenu::build(window, cx, move |menu, _window, _cx| { - menu.item( - ContextMenuEntry::new("LLM-based") - .toggleable( - IconPosition::End, - matches!(self.context_mode, ContextModeState::Llm), - ) - .handler({ - let this = this.clone(); - move |window, cx| { - this.update(cx, |this, cx| { - let current_options = - this.zeta.read(cx).options().clone(); - match current_options.context.clone() { - ContextMode::Agentic(_) => {} - ContextMode::Syntax(context_options) => { - let options = ZetaOptions { - context: ContextMode::Agentic( - AgenticContextOptions { - excerpt: context_options.excerpt, - }, - ), - ..current_options - }; - this.set_options_state(&options, window, cx); - this.set_zeta_options(options, cx); - } - } - }) - .ok(); - } - }), - ) - .item( - ContextMenuEntry::new("Syntax") - .toggleable( - IconPosition::End, - matches!(self.context_mode, ContextModeState::Syntax { .. 
}), - ) - .handler({ - move |window, cx| { - this.update(cx, |this, cx| { - let current_options = - this.zeta.read(cx).options().clone(); - match current_options.context.clone() { - ContextMode::Agentic(context_options) => { - let options = ZetaOptions { - context: ContextMode::Syntax( - EditPredictionContextOptions { - excerpt: context_options.excerpt, - ..DEFAULT_SYNTAX_CONTEXT_OPTIONS - }, - ), - ..current_options - }; - this.set_options_state(&options, window, cx); - this.set_zeta_options(options, cx); - } - ContextMode::Syntax(_) => {} - } - }) - .ok(); - } - }), - ) - }), - ) - .style(ui::DropdownStyle::Outlined), - ) - } - - fn render_prompt_format_dropdown(&self, window: &mut Window, cx: &mut Context) -> Div { - let active_format = self.zeta.read(cx).options().prompt_format; - let this = cx.weak_entity(); - - v_flex() - .gap_1p5() - .child( - Label::new("Prompt Format") - .size(LabelSize::Small) - .color(Color::Muted), - ) - .child( - DropdownMenu::new( - "ep-prompt-format", - active_format.to_string(), - ContextMenu::build(window, cx, move |mut menu, _window, _cx| { - for prompt_format in PromptFormat::iter() { - menu = menu.item( - ContextMenuEntry::new(prompt_format.to_string()) - .toggleable(IconPosition::End, active_format == prompt_format) - .handler({ - let this = this.clone(); - move |_window, cx| { - this.update(cx, |this, cx| { - let current_options = - this.zeta.read(cx).options().clone(); - let options = ZetaOptions { - prompt_format, - ..current_options - }; - this.set_zeta_options(options, cx); - }) - .ok(); - } - }), - ) - } - menu - }), - ) - .style(ui::DropdownStyle::Outlined), - ) - } - - fn render_tabs(&self, cx: &mut Context) -> Option { - if self.last_prediction.is_none() { - return None; - }; - - Some( - ui::ToggleButtonGroup::single_row( - "prediction", - [ - ui::ToggleButtonSimple::new( - "Context", - cx.listener(|this, _, _, cx| { - this.active_view = ActiveView::Context; - cx.notify(); - }), - ), - ui::ToggleButtonSimple::new( - "Inference", - cx.listener(|this, _, window, cx| { - this.active_view = ActiveView::Inference; - this.focus_feedback(window, cx); - cx.notify(); - }), - ), - ], - ) - .style(ui::ToggleButtonGroupStyle::Outlined) - .selected_index(if self.active_view == ActiveView::Context { - 0 - } else { - 1 - }) - .into_any_element(), - ) - } - - fn render_stats(&self) -> Option
{ - let Some(prediction) = self.last_prediction.as_ref() else { - return None; - }; - - Some( - v_flex() - .p_4() - .gap_2() - .min_w(px(160.)) - .child(Headline::new("Stats").size(HeadlineSize::Small)) - .child(Self::render_duration( - "Context retrieval", - Some(prediction.retrieval_time), - )) - .child(Self::render_duration("Request", prediction.request_time)), - ) - } - - fn render_duration(name: &'static str, time: Option) -> Div { - h_flex() - .gap_1() - .child(Label::new(name).color(Color::Muted).size(LabelSize::Small)) - .child(match time { - Some(time) => Label::new(if time.num_microseconds().unwrap_or(0) >= 1000 { - format!("{} ms", time.num_milliseconds()) - } else { - format!("{} µs", time.num_microseconds().unwrap_or(0)) - }) - .size(LabelSize::Small), - None => Label::new("...").size(LabelSize::Small), - }) - } - - fn render_content(&self, _: &mut Window, cx: &mut Context) -> AnyElement { - if !cx.has_flag::() { - return Self::render_message("`zeta2` feature flag is not enabled"); - } - - match self.last_prediction.as_ref() { - None => Self::render_message("No prediction"), - Some(prediction) => self.render_last_prediction(prediction, cx).into_any(), - } - } - - fn render_message(message: impl Into) -> AnyElement { - v_flex() - .size_full() - .justify_center() - .items_center() - .child(Label::new(message).size(LabelSize::Large)) - .into_any() - } - - fn render_last_prediction(&self, prediction: &LastPrediction, cx: &mut Context) -> Div { - match &self.active_view { - ActiveView::Context => div().size_full().child(prediction.context_editor.clone()), - ActiveView::Inference => h_flex() - .items_start() - .w_full() - .flex_1() - .border_t_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().editor_background) - .child( - v_flex() - .flex_1() - .gap_2() - .p_4() - .h_full() - .child( - h_flex() - .justify_between() - .child(ui::Headline::new("Prompt").size(ui::HeadlineSize::XSmall)) - .child(match prediction.state { - LastPredictionState::Requested - | LastPredictionState::Failed { .. } => ui::Chip::new("Local") - .bg_color(cx.theme().status().warning_background) - .label_color(Color::Success), - LastPredictionState::Success { .. } => ui::Chip::new("Cloud") - .bg_color(cx.theme().status().success_background) - .label_color(Color::Success), - }), - ) - .child(prediction.prompt_editor.clone()), - ) - .child(ui::vertical_divider()) - .child( - v_flex() - .flex_1() - .gap_2() - .h_full() - .child( - v_flex() - .flex_1() - .gap_2() - .p_4() - .child( - ui::Headline::new("Model Response") - .size(ui::HeadlineSize::XSmall), - ) - .child(match &prediction.state { - LastPredictionState::Success { - model_response_editor, - .. - } => model_response_editor.clone().into_any_element(), - LastPredictionState::Requested => v_flex() - .gap_2() - .child(Label::new("Loading...").buffer_font(cx)) - .into_any_element(), - LastPredictionState::Failed { message } => v_flex() - .gap_2() - .max_w_96() - .child(Label::new(message.clone()).buffer_font(cx)) - .into_any_element(), - }), - ) - .child(ui::divider()) - .child( - if prediction.request.can_collect_data - && let LastPredictionState::Success { - feedback_editor, - feedback: feedback_state, - .. 
- } = &prediction.state - { - v_flex() - .key_context("Zeta2Feedback") - .on_action(cx.listener(Self::handle_rate_positive)) - .on_action(cx.listener(Self::handle_rate_negative)) - .gap_2() - .p_2() - .child(feedback_editor.clone()) - .child( - h_flex() - .justify_end() - .w_full() - .child( - ButtonLike::new("rate-positive") - .when( - *feedback_state == Some(Feedback::Positive), - |this| this.style(ButtonStyle::Filled), - ) - .child( - KeyBinding::for_action( - &Zeta2RatePredictionPositive, - cx, - ) - .size(TextSize::Small.rems(cx)), - ) - .child(ui::Icon::new(ui::IconName::ThumbsUp)) - .on_click(cx.listener( - |this, _, window, cx| { - this.handle_rate_positive( - &Zeta2RatePredictionPositive, - window, - cx, - ); - }, - )), - ) - .child( - ButtonLike::new("rate-negative") - .when( - *feedback_state == Some(Feedback::Negative), - |this| this.style(ButtonStyle::Filled), - ) - .child( - KeyBinding::for_action( - &Zeta2RatePredictionNegative, - cx, - ) - .size(TextSize::Small.rems(cx)), - ) - .child(ui::Icon::new(ui::IconName::ThumbsDown)) - .on_click(cx.listener( - |this, _, window, cx| { - this.handle_rate_negative( - &Zeta2RatePredictionNegative, - window, - cx, - ); - }, - )), - ), - ) - .into_any() - } else { - Empty.into_any_element() - }, - ), - ), - } - } -} - -impl Focusable for Zeta2Inspector { - fn focus_handle(&self, _cx: &App) -> FocusHandle { - self.focus_handle.clone() - } -} - -impl Item for Zeta2Inspector { - type Event = (); - - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Zeta2 Inspector".into() - } -} - -impl EventEmitter<()> for Zeta2Inspector {} - -impl Render for Zeta2Inspector { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - v_flex() - .size_full() - .bg(cx.theme().colors().editor_background) - .child( - h_flex() - .w_full() - .child( - v_flex() - .flex_1() - .p_4() - .h_full() - .justify_between() - .child(self.render_options(window, cx)) - .gap_4() - .children(self.render_tabs(cx)), - ) - .child(ui::vertical_divider()) - .children(self.render_stats()), - ) - .child(self.render_content(window, cx)) - } -} - -// Using same approach as commit view - -struct ExcerptMetadataFile { - title: Arc, - worktree_id: WorktreeId, - path_style: PathStyle, -} - -impl language::File for ExcerptMetadataFile { - fn as_local(&self) -> Option<&dyn language::LocalFile> { - None - } - - fn disk_state(&self) -> DiskState { - DiskState::New - } - - fn path(&self) -> &Arc { - &self.title - } - - fn full_path(&self, _: &App) -> PathBuf { - self.title.as_std_path().to_path_buf() - } - - fn file_name<'a>(&'a self, _: &'a App) -> &'a str { - self.title.file_name().unwrap() - } - - fn path_style(&self, _: &App) -> PathStyle { - self.path_style - } - - fn worktree_id(&self, _: &App) -> WorktreeId { - self.worktree_id - } - - fn to_proto(&self, _: &App) -> language::proto::File { - unimplemented!() - } - - fn is_private(&self) -> bool { - false - } -} - -struct ZetaContextAddon { - excerpt_score_components: HashMap, -} - -impl editor::Addon for ZetaContextAddon { - fn to_any(&self) -> &dyn std::any::Any { - self - } - - fn render_buffer_header_controls( - &self, - excerpt_info: &multi_buffer::ExcerptInfo, - _window: &Window, - _cx: &App, - ) -> Option { - let score_components = self.excerpt_score_components.get(&excerpt_info.id)?.clone(); - - Some( - div() - .id(excerpt_info.id.to_proto() as usize) - .child(ui::Icon::new(IconName::Info)) - .cursor(CursorStyle::PointingHand) - .tooltip(move |_, cx| { - cx.new(|_| 
ScoreComponentsTooltip::new(&score_components)) - .into() - }) - .into_any(), - ) - } -} - -struct ScoreComponentsTooltip { - text: SharedString, -} - -impl ScoreComponentsTooltip { - fn new(components: &DeclarationScoreComponents) -> Self { - Self { - text: format!("{:#?}", components).into(), - } - } -} - -impl Render for ScoreComponentsTooltip { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - div().pl_2().pt_2p5().child( - div() - .elevation_2(cx) - .py_1() - .px_2() - .child(ui::Label::new(self.text.clone()).buffer_font(cx)), - ) - } -} diff --git a/crates/zeta_cli/src/evaluate.rs b/crates/zeta_cli/src/evaluate.rs deleted file mode 100644 index b5c23af24845a90d153943f6ee2ccd29bbfaf6a7..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/evaluate.rs +++ /dev/null @@ -1,299 +0,0 @@ -use std::{ - io::IsTerminal, - path::{Path, PathBuf}, - sync::Arc, -}; - -use anyhow::Result; -use clap::Args; -use collections::HashSet; -use gpui::AsyncApp; -use zeta2::udiff::DiffLine; - -use crate::{ - PromptFormat, - example::{Example, NamedExample}, - headless::ZetaCliAppState, - predict::{PredictionDetails, zeta2_predict}, -}; - -#[derive(Debug, Args)] -pub struct EvaluateArguments { - example_paths: Vec, - #[clap(long)] - skip_cache: bool, - #[arg(long, value_enum, default_value_t = PromptFormat::default())] - prompt_format: PromptFormat, - #[arg(long)] - use_expected_context: bool, -} - -pub async fn run_evaluate( - args: EvaluateArguments, - app_state: &Arc, - cx: &mut AsyncApp, -) { - let example_len = args.example_paths.len(); - let all_tasks = args.example_paths.into_iter().map(|path| { - let app_state = app_state.clone(); - cx.spawn(async move |cx| { - run_evaluate_one( - &path, - args.skip_cache, - args.prompt_format, - args.use_expected_context, - app_state.clone(), - cx, - ) - .await - }) - }); - let all_results = futures::future::try_join_all(all_tasks).await.unwrap(); - - let aggregated_result = EvaluationResult { - context: Scores::aggregate(all_results.iter().map(|r| &r.context)), - edit_prediction: Scores::aggregate(all_results.iter().map(|r| &r.edit_prediction)), - }; - - if example_len > 1 { - println!("\n{}", "-".repeat(80)); - println!("# TOTAL SCORES:"); - println!("{}", aggregated_result.to_markdown()); - } -} - -pub async fn run_evaluate_one( - example_path: &Path, - skip_cache: bool, - prompt_format: PromptFormat, - use_expected_context: bool, - app_state: Arc, - cx: &mut AsyncApp, -) -> Result { - let example = NamedExample::load(&example_path).unwrap(); - let predictions = zeta2_predict( - example.clone(), - skip_cache, - prompt_format, - use_expected_context, - &app_state, - cx, - ) - .await - .unwrap(); - - let evaluation_result = evaluate(&example.example, &predictions); - - println!( - "## Expected edit prediction:\n\n```diff\n{}\n```\n", - compare_diffs(&example.example.expected_patch, &predictions.diff) - ); - println!( - "## Actual edit prediction:\n\n```diff\n{}\n```\n", - compare_diffs(&predictions.diff, &example.example.expected_patch) - ); - - println!("{}", evaluation_result.to_markdown()); - - anyhow::Ok(evaluation_result) -} - -#[derive(Debug, Default)] -pub struct EvaluationResult { - pub edit_prediction: Scores, - pub context: Scores, -} - -#[derive(Default, Debug)] -pub struct Scores { - pub true_positives: usize, - pub false_positives: usize, - pub false_negatives: usize, -} - -impl Scores { - pub fn new(expected: &HashSet, actual: &HashSet) -> Scores { - let true_positives = 
expected.intersection(actual).count(); - let false_positives = actual.difference(expected).count(); - let false_negatives = expected.difference(actual).count(); - - Scores { - true_positives, - false_positives, - false_negatives, - } - } - - pub fn to_markdown(&self) -> String { - format!( - " -Precision : {:.4} -Recall : {:.4} -F1 Score : {:.4} -True Positives : {} -False Positives : {} -False Negatives : {}", - self.precision(), - self.recall(), - self.f1_score(), - self.true_positives, - self.false_positives, - self.false_negatives - ) - } - - pub fn aggregate<'a>(scores: impl Iterator) -> Scores { - let mut true_positives = 0; - let mut false_positives = 0; - let mut false_negatives = 0; - - for score in scores { - true_positives += score.true_positives; - false_positives += score.false_positives; - false_negatives += score.false_negatives; - } - - Scores { - true_positives, - false_positives, - false_negatives, - } - } - - pub fn precision(&self) -> f64 { - if self.true_positives + self.false_positives == 0 { - 0.0 - } else { - self.true_positives as f64 / (self.true_positives + self.false_positives) as f64 - } - } - - pub fn recall(&self) -> f64 { - if self.true_positives + self.false_negatives == 0 { - 0.0 - } else { - self.true_positives as f64 / (self.true_positives + self.false_negatives) as f64 - } - } - - pub fn f1_score(&self) -> f64 { - let recall = self.recall(); - let precision = self.precision(); - if precision + recall == 0.0 { - 0.0 - } else { - 2.0 * precision * recall / (precision + recall) - } - } -} - -impl EvaluationResult { - pub fn to_markdown(&self) -> String { - format!( - r#" -### Context Scores -{} - -### Edit Prediction Scores -{} -"#, - self.context.to_markdown(), - self.edit_prediction.to_markdown() - ) - } -} - -pub fn evaluate(example: &Example, preds: &PredictionDetails) -> EvaluationResult { - let mut eval_result = EvaluationResult::default(); - - let actual_context_lines: HashSet<_> = preds - .excerpts - .iter() - .flat_map(|excerpt| { - excerpt - .text - .lines() - .map(|line| format!("{}: {line}", excerpt.path.display())) - }) - .collect(); - - let mut false_positive_lines = actual_context_lines.clone(); - - for entry in &example.expected_context { - let mut best_alternative_score = Scores::default(); - - for alternative in &entry.alternatives { - let expected: HashSet<_> = alternative - .excerpts - .iter() - .flat_map(|excerpt| { - excerpt - .text - .lines() - .map(|line| format!("{}: {line}", excerpt.path.display())) - }) - .collect(); - - let scores = Scores::new(&expected, &actual_context_lines); - - false_positive_lines.retain(|line| !actual_context_lines.contains(line)); - - if scores.recall() > best_alternative_score.recall() { - best_alternative_score = scores; - } - } - - eval_result.context.false_negatives += best_alternative_score.false_negatives; - eval_result.context.true_positives += best_alternative_score.true_positives; - } - - eval_result.context.false_positives = false_positive_lines.len(); - - // todo: alternatives for patches - let expected_patch_lines = example - .expected_patch - .lines() - .map(DiffLine::parse) - .filter(|line| matches!(line, DiffLine::Addition(_) | DiffLine::Deletion(_))) - .map(|line| line.to_string()) - .collect(); - - let actual_patch_lines = preds - .diff - .lines() - .map(DiffLine::parse) - .filter(|line| matches!(line, DiffLine::Addition(_) | DiffLine::Deletion(_))) - .map(|line| line.to_string()) - .collect(); - - eval_result.edit_prediction = Scores::new(&expected_patch_lines, &actual_patch_lines); - 
eval_result -} - -/// Return annotated `patch_a` so that: -/// Additions and deletions that are not present in `patch_b` will be highlighted in red. -/// Additions and deletions that are present in `patch_b` will be highlighted in green. -pub fn compare_diffs(patch_a: &str, patch_b: &str) -> String { - let use_color = std::io::stdout().is_terminal(); - let green = if use_color { "\x1b[32m✓ " } else { "" }; - let red = if use_color { "\x1b[31m✗ " } else { "" }; - let neutral = if use_color { " " } else { "" }; - let reset = if use_color { "\x1b[0m" } else { "" }; - let lines_a = patch_a.lines().map(DiffLine::parse); - let lines_b: Vec<_> = patch_b.lines().map(DiffLine::parse).collect(); - - let annotated = lines_a - .map(|line| match line { - DiffLine::Addition(_) | DiffLine::Deletion(_) => { - if lines_b.contains(&line) { - format!("{green}{line}{reset}") - } else { - format!("{red}{line}{reset}") - } - } - _ => format!("{neutral}{line}{reset}"), - }) - .collect::>(); - - annotated.join("\n") -} diff --git a/crates/zeta_cli/src/example.rs b/crates/zeta_cli/src/example.rs deleted file mode 100644 index a470effa575f5e8ece3c59781dc09d9d1c5e822e..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/example.rs +++ /dev/null @@ -1,684 +0,0 @@ -use std::{ - borrow::Cow, - cell::RefCell, - fmt::{self, Display}, - fs, - io::Write, - mem, - path::{Path, PathBuf}, - sync::Arc, -}; - -use anyhow::{Context as _, Result, anyhow}; -use clap::ValueEnum; -use cloud_zeta2_prompt::CURSOR_MARKER; -use collections::HashMap; -use edit_prediction_context::Line; -use futures::{ - AsyncWriteExt as _, - lock::{Mutex, OwnedMutexGuard}, -}; -use gpui::{AsyncApp, Entity, http_client::Url}; -use language::{Anchor, Buffer}; -use project::{Project, ProjectPath}; -use pulldown_cmark::CowStr; -use serde::{Deserialize, Serialize}; -use util::{paths::PathStyle, rel_path::RelPath}; -use zeta2::udiff::OpenedBuffers; - -use crate::paths::{REPOS_DIR, WORKTREES_DIR}; - -const UNCOMMITTED_DIFF_HEADING: &str = "Uncommitted Diff"; -const EDIT_HISTORY_HEADING: &str = "Edit History"; -const CURSOR_POSITION_HEADING: &str = "Cursor Position"; -const EXPECTED_PATCH_HEADING: &str = "Expected Patch"; -const EXPECTED_CONTEXT_HEADING: &str = "Expected Context"; -const REPOSITORY_URL_FIELD: &str = "repository_url"; -const REVISION_FIELD: &str = "revision"; - -#[derive(Debug, Clone)] -pub struct NamedExample { - pub name: String, - pub example: Example, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Example { - pub repository_url: String, - pub revision: String, - pub uncommitted_diff: String, - pub cursor_path: PathBuf, - pub cursor_position: String, - pub edit_history: String, - pub expected_patch: String, - pub expected_context: Vec, -} - -pub type ActualExcerpt = Excerpt; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Excerpt { - pub path: PathBuf, - pub text: String, -} - -#[derive(Default, Clone, Debug, Serialize, Deserialize)] -pub struct ExpectedContextEntry { - pub heading: String, - pub alternatives: Vec, -} - -#[derive(Default, Clone, Debug, Serialize, Deserialize)] -pub struct ExpectedExcerptSet { - pub heading: String, - pub excerpts: Vec, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ExpectedExcerpt { - pub path: PathBuf, - pub text: String, - pub required_lines: Vec, -} - -#[derive(ValueEnum, Debug, Clone)] -pub enum ExampleFormat { - Json, - Toml, - Md, -} - -impl NamedExample { - pub fn load(path: impl AsRef) -> Result { - let path = path.as_ref(); - let 
content = std::fs::read_to_string(path)?; - let ext = path.extension(); - - match ext.and_then(|s| s.to_str()) { - Some("json") => Ok(Self { - name: path.file_stem().unwrap_or_default().display().to_string(), - example: serde_json::from_str(&content)?, - }), - Some("toml") => Ok(Self { - name: path.file_stem().unwrap_or_default().display().to_string(), - example: toml::from_str(&content)?, - }), - Some("md") => Self::parse_md(&content), - Some(_) => { - anyhow::bail!("Unrecognized example extension: {}", ext.unwrap().display()); - } - None => { - anyhow::bail!( - "Failed to determine example type since the file does not have an extension." - ); - } - } - } - - pub fn parse_md(input: &str) -> Result { - use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Parser, Tag, TagEnd}; - - let parser = Parser::new(input); - - let mut named = NamedExample { - name: String::new(), - example: Example { - repository_url: String::new(), - revision: String::new(), - uncommitted_diff: String::new(), - cursor_path: PathBuf::new(), - cursor_position: String::new(), - edit_history: String::new(), - expected_patch: String::new(), - expected_context: Vec::new(), - }, - }; - - let mut text = String::new(); - let mut block_info: CowStr = "".into(); - - #[derive(PartialEq)] - enum Section { - UncommittedDiff, - EditHistory, - CursorPosition, - ExpectedExcerpts, - ExpectedPatch, - Other, - } - - let mut current_section = Section::Other; - - for event in parser { - match event { - Event::Text(line) => { - text.push_str(&line); - - if !named.name.is_empty() - && current_section == Section::Other - // in h1 section - && let Some((field, value)) = line.split_once('=') - { - match field.trim() { - REPOSITORY_URL_FIELD => { - named.example.repository_url = value.trim().to_string(); - } - REVISION_FIELD => { - named.example.revision = value.trim().to_string(); - } - _ => {} - } - } - } - Event::End(TagEnd::Heading(HeadingLevel::H1)) => { - if !named.name.is_empty() { - anyhow::bail!( - "Found multiple H1 headings. There should only be one with the name of the example." 
- ); - } - named.name = mem::take(&mut text); - } - Event::End(TagEnd::Heading(HeadingLevel::H2)) => { - let title = mem::take(&mut text); - current_section = if title.eq_ignore_ascii_case(UNCOMMITTED_DIFF_HEADING) { - Section::UncommittedDiff - } else if title.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) { - Section::EditHistory - } else if title.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) { - Section::CursorPosition - } else if title.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) { - Section::ExpectedPatch - } else if title.eq_ignore_ascii_case(EXPECTED_CONTEXT_HEADING) { - Section::ExpectedExcerpts - } else { - Section::Other - }; - } - Event::End(TagEnd::Heading(HeadingLevel::H3)) => { - let heading = mem::take(&mut text); - match current_section { - Section::ExpectedExcerpts => { - named.example.expected_context.push(ExpectedContextEntry { - heading, - alternatives: Vec::new(), - }); - } - _ => {} - } - } - Event::End(TagEnd::Heading(HeadingLevel::H4)) => { - let heading = mem::take(&mut text); - match current_section { - Section::ExpectedExcerpts => { - let expected_context = &mut named.example.expected_context; - let last_entry = expected_context.last_mut().unwrap(); - last_entry.alternatives.push(ExpectedExcerptSet { - heading, - excerpts: Vec::new(), - }) - } - _ => {} - } - } - Event::End(TagEnd::Heading(level)) => { - anyhow::bail!("Unexpected heading level: {level}"); - } - Event::Start(Tag::CodeBlock(kind)) => { - match kind { - CodeBlockKind::Fenced(info) => { - block_info = info; - } - CodeBlockKind::Indented => { - anyhow::bail!("Unexpected indented codeblock"); - } - }; - } - Event::Start(_) => { - text.clear(); - block_info = "".into(); - } - Event::End(TagEnd::CodeBlock) => { - let block_info = block_info.trim(); - match current_section { - Section::UncommittedDiff => { - named.example.uncommitted_diff = mem::take(&mut text); - } - Section::EditHistory => { - named.example.edit_history.push_str(&mem::take(&mut text)); - } - Section::CursorPosition => { - named.example.cursor_path = block_info.into(); - named.example.cursor_position = mem::take(&mut text); - } - Section::ExpectedExcerpts => { - let text = mem::take(&mut text); - for excerpt in text.split("\n…\n") { - let (mut text, required_lines) = extract_required_lines(&excerpt); - if !text.ends_with('\n') { - text.push('\n'); - } - let alternatives = &mut named - .example - .expected_context - .last_mut() - .unwrap() - .alternatives; - - if alternatives.is_empty() { - alternatives.push(ExpectedExcerptSet { - heading: String::new(), - excerpts: vec![], - }); - } - - alternatives - .last_mut() - .unwrap() - .excerpts - .push(ExpectedExcerpt { - path: block_info.into(), - text, - required_lines, - }); - } - } - Section::ExpectedPatch => { - named.example.expected_patch = mem::take(&mut text); - } - Section::Other => {} - } - } - _ => {} - } - } - - if named.example.cursor_path.as_path() == Path::new("") - || named.example.cursor_position.is_empty() - { - anyhow::bail!("Missing cursor position codeblock"); - } - - Ok(named) - } - - pub fn write(&self, format: ExampleFormat, mut out: impl Write) -> Result<()> { - match format { - ExampleFormat::Json => Ok(serde_json::to_writer(out, &self.example)?), - ExampleFormat::Toml => { - Ok(out.write_all(toml::to_string_pretty(&self.example)?.as_bytes())?) 
- } - ExampleFormat::Md => Ok(write!(out, "{}", self)?), - } - } - - pub async fn setup_worktree(&self) -> Result { - let (repo_owner, repo_name) = self.repo_name()?; - let file_name = self.file_name(); - - fs::create_dir_all(&*REPOS_DIR)?; - fs::create_dir_all(&*WORKTREES_DIR)?; - - let repo_dir = REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref()); - let repo_lock = lock_repo(&repo_dir).await; - - if !repo_dir.is_dir() { - fs::create_dir_all(&repo_dir)?; - run_git(&repo_dir, &["init"]).await?; - run_git( - &repo_dir, - &["remote", "add", "origin", &self.example.repository_url], - ) - .await?; - } - - // Resolve the example to a revision, fetching it if needed. - let revision = run_git(&repo_dir, &["rev-parse", &self.example.revision]).await; - let revision = if let Ok(revision) = revision { - revision - } else { - run_git( - &repo_dir, - &["fetch", "--depth", "1", "origin", &self.example.revision], - ) - .await?; - let revision = run_git(&repo_dir, &["rev-parse", "FETCH_HEAD"]).await?; - if revision != self.example.revision { - run_git(&repo_dir, &["tag", &self.example.revision, &revision]).await?; - } - revision - }; - - // Create the worktree for this example if needed. - let worktree_path = WORKTREES_DIR.join(&file_name); - if worktree_path.is_dir() { - run_git(&worktree_path, &["clean", "--force", "-d"]).await?; - run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?; - run_git(&worktree_path, &["checkout", revision.as_str()]).await?; - } else { - let worktree_path_string = worktree_path.to_string_lossy(); - run_git(&repo_dir, &["branch", "-f", &file_name, revision.as_str()]).await?; - run_git( - &repo_dir, - &["worktree", "add", "-f", &worktree_path_string, &file_name], - ) - .await?; - } - drop(repo_lock); - - // Apply the uncommitted diff for this example. - if !self.example.uncommitted_diff.is_empty() { - let mut apply_process = smol::process::Command::new("git") - .current_dir(&worktree_path) - .args(&["apply", "-"]) - .stdin(std::process::Stdio::piped()) - .spawn()?; - - let mut stdin = apply_process.stdin.take().unwrap(); - stdin - .write_all(self.example.uncommitted_diff.as_bytes()) - .await?; - stdin.close().await?; - drop(stdin); - - let apply_result = apply_process.output().await?; - if !apply_result.status.success() { - anyhow::bail!( - "Failed to apply uncommitted diff patch with status: {}\nstderr:\n{}\nstdout:\n{}", - apply_result.status, - String::from_utf8_lossy(&apply_result.stderr), - String::from_utf8_lossy(&apply_result.stdout), - ); - } - } - - Ok(worktree_path) - } - - fn file_name(&self) -> String { - self.name - .chars() - .map(|c| { - if c.is_whitespace() { - '-' - } else { - c.to_ascii_lowercase() - } - }) - .collect() - } - - fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> { - // git@github.com:owner/repo.git - if self.example.repository_url.contains('@') { - let (owner, repo) = self - .example - .repository_url - .split_once(':') - .context("expected : in git url")? - .1 - .split_once('/') - .context("expected / in git url")?; - Ok(( - Cow::Borrowed(owner), - Cow::Borrowed(repo.trim_end_matches(".git")), - )) - // http://github.com/owner/repo.git - } else { - let url = Url::parse(&self.example.repository_url)?; - let mut segments = url.path_segments().context("empty http url")?; - let owner = segments - .next() - .context("expected owner path segment")? - .to_string(); - let repo = segments - .next() - .context("expected repo path segment")? 
- .trim_end_matches(".git") - .to_string(); - assert!(segments.next().is_none()); - - Ok((owner.into(), repo.into())) - } - } - - pub async fn cursor_position( - &self, - project: &Entity, - cx: &mut AsyncApp, - ) -> Result<(Entity, Anchor)> { - let worktree = project.read_with(cx, |project, cx| { - project.visible_worktrees(cx).next().unwrap() - })?; - let cursor_path = RelPath::new(&self.example.cursor_path, PathStyle::Posix)?.into_arc(); - let cursor_buffer = project - .update(cx, |project, cx| { - project.open_buffer( - ProjectPath { - worktree_id: worktree.read(cx).id(), - path: cursor_path, - }, - cx, - ) - })? - .await?; - let cursor_offset_within_excerpt = self - .example - .cursor_position - .find(CURSOR_MARKER) - .ok_or_else(|| anyhow!("missing cursor marker"))?; - let mut cursor_excerpt = self.example.cursor_position.clone(); - cursor_excerpt.replace_range( - cursor_offset_within_excerpt..(cursor_offset_within_excerpt + CURSOR_MARKER.len()), - "", - ); - let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| { - let text = buffer.text(); - - let mut matches = text.match_indices(&cursor_excerpt); - let Some((excerpt_offset, _)) = matches.next() else { - anyhow::bail!( - "Cursor excerpt did not exist in buffer.\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\n" - ); - }; - assert!(matches.next().is_none()); - - Ok(excerpt_offset) - })??; - - let cursor_offset = excerpt_offset + cursor_offset_within_excerpt; - let cursor_anchor = - cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?; - Ok((cursor_buffer, cursor_anchor)) - } - - #[must_use] - pub async fn apply_edit_history( - &self, - project: &Entity, - cx: &mut AsyncApp, - ) -> Result> { - zeta2::udiff::apply_diff(&self.example.edit_history, project, cx).await - } -} - -fn extract_required_lines(text: &str) -> (String, Vec) { - const MARKER: &str = "[ZETA]"; - let mut new_text = String::new(); - let mut required_lines = Vec::new(); - let mut skipped_lines = 0_u32; - - for (row, mut line) in text.split('\n').enumerate() { - if let Some(marker_column) = line.find(MARKER) { - let mut strip_column = marker_column; - - while strip_column > 0 { - let prev_char = line[strip_column - 1..].chars().next().unwrap(); - if prev_char.is_whitespace() || ['/', '#'].contains(&prev_char) { - strip_column -= 1; - } else { - break; - } - } - - let metadata = &line[marker_column + MARKER.len()..]; - if metadata.contains("required") { - required_lines.push(Line(row as u32 - skipped_lines)); - } - - if strip_column == 0 { - skipped_lines += 1; - continue; - } - - line = &line[..strip_column]; - } - - new_text.push_str(line); - new_text.push('\n'); - } - - new_text.pop(); - - (new_text, required_lines) -} - -async fn run_git(repo_path: &Path, args: &[&str]) -> Result { - let output = smol::process::Command::new("git") - .current_dir(repo_path) - .args(args) - .output() - .await?; - - anyhow::ensure!( - output.status.success(), - "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}", - args.join(" "), - repo_path.display(), - output.status, - String::from_utf8_lossy(&output.stderr), - String::from_utf8_lossy(&output.stdout), - ); - Ok(String::from_utf8(output.stdout)?.trim().to_string()) -} - -impl Display for NamedExample { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "# {}\n\n", self.name)?; - write!( - f, - "{REPOSITORY_URL_FIELD} = {}\n", - self.example.repository_url - )?; - write!(f, "{REVISION_FIELD} = {}\n\n", self.example.revision)?; - - write!(f, "## 
{UNCOMMITTED_DIFF_HEADING}\n\n")?; - write!(f, "`````diff\n")?; - write!(f, "{}", self.example.uncommitted_diff)?; - write!(f, "`````\n")?; - - if !self.example.edit_history.is_empty() { - write!(f, "`````diff\n{}`````\n", self.example.edit_history)?; - } - - write!( - f, - "## {CURSOR_POSITION_HEADING}\n\n`````{}\n{}`````\n", - self.example.cursor_path.display(), - self.example.cursor_position - )?; - write!(f, "## {EDIT_HISTORY_HEADING}\n\n")?; - - if !self.example.expected_patch.is_empty() { - write!( - f, - "\n## {EXPECTED_PATCH_HEADING}\n\n`````diff\n{}`````\n", - self.example.expected_patch - )?; - } - - if !self.example.expected_context.is_empty() { - write!(f, "\n## {EXPECTED_CONTEXT_HEADING}\n\n")?; - - for entry in &self.example.expected_context { - write!(f, "\n### {}\n\n", entry.heading)?; - - let skip_h4 = - entry.alternatives.len() == 1 && entry.alternatives[0].heading.is_empty(); - - for excerpt_set in &entry.alternatives { - if !skip_h4 { - write!(f, "\n#### {}\n\n", excerpt_set.heading)?; - } - - for excerpt in &excerpt_set.excerpts { - write!( - f, - "`````{}{}\n{}`````\n\n", - excerpt - .path - .extension() - .map(|ext| format!("{} ", ext.to_string_lossy())) - .unwrap_or_default(), - excerpt.path.display(), - excerpt.text - )?; - } - } - } - } - - Ok(()) - } -} - -thread_local! { - static REPO_LOCKS: RefCell>>> = RefCell::new(HashMap::default()); -} - -#[must_use] -pub async fn lock_repo(path: impl AsRef) -> OwnedMutexGuard<()> { - REPO_LOCKS - .with(|cell| { - cell.borrow_mut() - .entry(path.as_ref().to_path_buf()) - .or_default() - .clone() - }) - .lock_owned() - .await -} - -#[cfg(test)] -mod tests { - use super::*; - use indoc::indoc; - use pretty_assertions::assert_eq; - - #[test] - fn test_extract_required_lines() { - let input = indoc! {" - zero - one // [ZETA] required - two - // [ZETA] something - three - four # [ZETA] required - five - "}; - - let expected_updated_input = indoc! 
{" - zero - one - two - three - four - five - "}; - - let expected_required_lines = vec![Line(1), Line(4)]; - - let (updated_input, required_lines) = extract_required_lines(input); - assert_eq!(updated_input, expected_updated_input); - assert_eq!(required_lines, expected_required_lines); - } -} diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs deleted file mode 100644 index 82760d6061d9b96a2da74bf5cb24e43d9ecdba60..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/main.rs +++ /dev/null @@ -1,479 +0,0 @@ -mod evaluate; -mod example; -mod headless; -mod paths; -mod predict; -mod source_location; -mod syntax_retrieval_stats; -mod util; - -use crate::evaluate::{EvaluateArguments, run_evaluate}; -use crate::example::{ExampleFormat, NamedExample}; -use crate::predict::{PredictArguments, run_zeta2_predict}; -use crate::syntax_retrieval_stats::retrieval_stats; -use ::util::paths::PathStyle; -use anyhow::{Result, anyhow}; -use clap::{Args, Parser, Subcommand}; -use cloud_llm_client::predict_edits_v3; -use edit_prediction_context::{ - EditPredictionContextOptions, EditPredictionExcerptOptions, EditPredictionScoreOptions, -}; -use gpui::{Application, AsyncApp, Entity, prelude::*}; -use language::{Bias, Buffer, BufferSnapshot, Point}; -use project::{Project, Worktree}; -use reqwest_client::ReqwestClient; -use serde_json::json; -use std::io::{self}; -use std::time::Duration; -use std::{collections::HashSet, path::PathBuf, str::FromStr, sync::Arc}; -use zeta2::ContextMode; - -use crate::headless::ZetaCliAppState; -use crate::source_location::SourceLocation; -use crate::util::{open_buffer, open_buffer_with_language_server}; - -#[derive(Parser, Debug)] -#[command(name = "zeta")] -struct ZetaCliArgs { - #[command(subcommand)] - command: Command, -} - -#[derive(Subcommand, Debug)] -enum Command { - Zeta1 { - #[command(subcommand)] - command: Zeta1Command, - }, - Zeta2 { - #[command(subcommand)] - command: Zeta2Command, - }, - ConvertExample { - path: PathBuf, - #[arg(long, value_enum, default_value_t = ExampleFormat::Md)] - output_format: ExampleFormat, - }, -} - -#[derive(Subcommand, Debug)] -enum Zeta1Command { - Context { - #[clap(flatten)] - context_args: ContextArgs, - }, -} - -#[derive(Subcommand, Debug)] -enum Zeta2Command { - Syntax { - #[clap(flatten)] - args: Zeta2Args, - #[clap(flatten)] - syntax_args: Zeta2SyntaxArgs, - #[command(subcommand)] - command: Zeta2SyntaxCommand, - }, - Predict(PredictArguments), - Eval(EvaluateArguments), -} - -#[derive(Subcommand, Debug)] -enum Zeta2SyntaxCommand { - Context { - #[clap(flatten)] - context_args: ContextArgs, - }, - Stats { - #[arg(long)] - worktree: PathBuf, - #[arg(long)] - extension: Option, - #[arg(long)] - limit: Option, - #[arg(long)] - skip: Option, - }, -} - -#[derive(Debug, Args)] -#[group(requires = "worktree")] -struct ContextArgs { - #[arg(long)] - worktree: PathBuf, - #[arg(long)] - cursor: SourceLocation, - #[arg(long)] - use_language_server: bool, - #[arg(long)] - edit_history: Option, -} - -#[derive(Debug, Args)] -struct Zeta2Args { - #[arg(long, default_value_t = 8192)] - max_prompt_bytes: usize, - #[arg(long, default_value_t = 2048)] - max_excerpt_bytes: usize, - #[arg(long, default_value_t = 1024)] - min_excerpt_bytes: usize, - #[arg(long, default_value_t = 0.66)] - target_before_cursor_over_total_bytes: f32, - #[arg(long, default_value_t = 1024)] - max_diagnostic_bytes: usize, - #[arg(long, value_enum, default_value_t = PromptFormat::default())] - prompt_format: PromptFormat, - #[arg(long, 
value_enum, default_value_t = Default::default())] - output_format: OutputFormat, - #[arg(long, default_value_t = 42)] - file_indexing_parallelism: usize, -} - -#[derive(Debug, Args)] -struct Zeta2SyntaxArgs { - #[arg(long, default_value_t = false)] - disable_imports_gathering: bool, - #[arg(long, default_value_t = u8::MAX)] - max_retrieved_definitions: u8, -} - -fn syntax_args_to_options( - zeta2_args: &Zeta2Args, - syntax_args: &Zeta2SyntaxArgs, - omit_excerpt_overlaps: bool, -) -> zeta2::ZetaOptions { - zeta2::ZetaOptions { - context: ContextMode::Syntax(EditPredictionContextOptions { - max_retrieved_declarations: syntax_args.max_retrieved_definitions, - use_imports: !syntax_args.disable_imports_gathering, - excerpt: EditPredictionExcerptOptions { - max_bytes: zeta2_args.max_excerpt_bytes, - min_bytes: zeta2_args.min_excerpt_bytes, - target_before_cursor_over_total_bytes: zeta2_args - .target_before_cursor_over_total_bytes, - }, - score: EditPredictionScoreOptions { - omit_excerpt_overlaps, - }, - }), - max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes, - max_prompt_bytes: zeta2_args.max_prompt_bytes, - prompt_format: zeta2_args.prompt_format.into(), - file_indexing_parallelism: zeta2_args.file_indexing_parallelism, - buffer_change_grouping_interval: Duration::ZERO, - } -} - -#[derive(clap::ValueEnum, Default, Debug, Clone, Copy)] -enum PromptFormat { - MarkedExcerpt, - LabeledSections, - OnlySnippets, - #[default] - NumberedLines, - OldTextNewText, -} - -impl Into for PromptFormat { - fn into(self) -> predict_edits_v3::PromptFormat { - match self { - Self::MarkedExcerpt => predict_edits_v3::PromptFormat::MarkedExcerpt, - Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections, - Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets, - Self::NumberedLines => predict_edits_v3::PromptFormat::NumLinesUniDiff, - Self::OldTextNewText => predict_edits_v3::PromptFormat::OldTextNewText, - } - } -} - -#[derive(clap::ValueEnum, Default, Debug, Clone)] -enum OutputFormat { - #[default] - Prompt, - Request, - Full, -} - -#[derive(Debug, Clone)] -enum FileOrStdin { - File(PathBuf), - Stdin, -} - -impl FileOrStdin { - async fn read_to_string(&self) -> Result { - match self { - FileOrStdin::File(path) => smol::fs::read_to_string(path).await, - FileOrStdin::Stdin => smol::unblock(|| std::io::read_to_string(std::io::stdin())).await, - } - } -} - -impl FromStr for FileOrStdin { - type Err = ::Err; - - fn from_str(s: &str) -> Result { - match s { - "-" => Ok(Self::Stdin), - _ => Ok(Self::File(PathBuf::from_str(s)?)), - } - } -} - -struct LoadedContext { - full_path_str: String, - snapshot: BufferSnapshot, - clipped_cursor: Point, - worktree: Entity, - project: Entity, - buffer: Entity, -} - -async fn load_context( - args: &ContextArgs, - app_state: &Arc, - cx: &mut AsyncApp, -) -> Result { - let ContextArgs { - worktree: worktree_path, - cursor, - use_language_server, - .. - } = args; - - let worktree_path = worktree_path.canonicalize()?; - - let project = cx.update(|cx| { - Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ) - })?; - - let worktree = project - .update(cx, |project, cx| { - project.create_worktree(&worktree_path, true, cx) - })? 
- .await?; - - let mut ready_languages = HashSet::default(); - let (_lsp_open_handle, buffer) = if *use_language_server { - let (lsp_open_handle, _, buffer) = open_buffer_with_language_server( - project.clone(), - worktree.clone(), - cursor.path.clone(), - &mut ready_languages, - cx, - ) - .await?; - (Some(lsp_open_handle), buffer) - } else { - let buffer = - open_buffer(project.clone(), worktree.clone(), cursor.path.clone(), cx).await?; - (None, buffer) - }; - - let full_path_str = worktree - .read_with(cx, |worktree, _| worktree.root_name().join(&cursor.path))? - .display(PathStyle::local()) - .to_string(); - - let snapshot = cx.update(|cx| buffer.read(cx).snapshot())?; - let clipped_cursor = snapshot.clip_point(cursor.point, Bias::Left); - if clipped_cursor != cursor.point { - let max_row = snapshot.max_point().row; - if cursor.point.row < max_row { - return Err(anyhow!( - "Cursor position {:?} is out of bounds (line length is {})", - cursor.point, - snapshot.line_len(cursor.point.row) - )); - } else { - return Err(anyhow!( - "Cursor position {:?} is out of bounds (max row is {})", - cursor.point, - max_row - )); - } - } - - Ok(LoadedContext { - full_path_str, - snapshot, - clipped_cursor, - worktree, - project, - buffer, - }) -} - -async fn zeta2_syntax_context( - zeta2_args: Zeta2Args, - syntax_args: Zeta2SyntaxArgs, - args: ContextArgs, - app_state: &Arc, - cx: &mut AsyncApp, -) -> Result { - let LoadedContext { - worktree, - project, - buffer, - clipped_cursor, - .. - } = load_context(&args, app_state, cx).await?; - - // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for - // the whole worktree. - worktree - .read_with(cx, |worktree, _cx| { - worktree.as_local().unwrap().scan_complete() - })? - .await; - let output = cx - .update(|cx| { - let zeta = cx.new(|cx| { - zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx) - }); - let indexing_done_task = zeta.update(cx, |zeta, cx| { - zeta.set_options(syntax_args_to_options(&zeta2_args, &syntax_args, true)); - zeta.register_buffer(&buffer, &project, cx); - zeta.wait_for_initial_indexing(&project, cx) - }); - cx.spawn(async move |cx| { - indexing_done_task.await?; - let request = zeta - .update(cx, |zeta, cx| { - let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor); - zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx) - })? - .await?; - - let (prompt_string, section_labels) = cloud_zeta2_prompt::build_prompt(&request)?; - - match zeta2_args.output_format { - OutputFormat::Prompt => anyhow::Ok(prompt_string), - OutputFormat::Request => anyhow::Ok(serde_json::to_string_pretty(&request)?), - OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({ - "request": request, - "prompt": prompt_string, - "section_labels": section_labels, - }))?), - } - }) - })? - .await?; - - Ok(output) -} - -async fn zeta1_context( - args: ContextArgs, - app_state: &Arc, - cx: &mut AsyncApp, -) -> Result { - let LoadedContext { - full_path_str, - snapshot, - clipped_cursor, - .. - } = load_context(&args, app_state, cx).await?; - - let events = match args.edit_history { - Some(events) => events.read_to_string().await?, - None => String::new(), - }; - - let prompt_for_events = move || (events, 0); - cx.update(|cx| { - zeta::gather_context( - full_path_str, - &snapshot, - clipped_cursor, - prompt_for_events, - cx, - ) - })? 
- .await -} - -fn main() { - zlog::init(); - zlog::init_output_stderr(); - let args = ZetaCliArgs::parse(); - let http_client = Arc::new(ReqwestClient::new()); - let app = Application::headless().with_http_client(http_client); - - app.run(move |cx| { - let app_state = Arc::new(headless::init(cx)); - cx.spawn(async move |cx| { - match args.command { - Command::Zeta1 { - command: Zeta1Command::Context { context_args }, - } => { - let context = zeta1_context(context_args, &app_state, cx).await.unwrap(); - let result = serde_json::to_string_pretty(&context.body).unwrap(); - println!("{}", result); - } - Command::Zeta2 { command } => match command { - Zeta2Command::Predict(arguments) => { - run_zeta2_predict(arguments, &app_state, cx).await; - } - Zeta2Command::Eval(arguments) => { - run_evaluate(arguments, &app_state, cx).await; - } - Zeta2Command::Syntax { - args, - syntax_args, - command, - } => { - let result = match command { - Zeta2SyntaxCommand::Context { context_args } => { - zeta2_syntax_context( - args, - syntax_args, - context_args, - &app_state, - cx, - ) - .await - } - Zeta2SyntaxCommand::Stats { - worktree, - extension, - limit, - skip, - } => { - retrieval_stats( - worktree, - app_state, - extension, - limit, - skip, - syntax_args_to_options(&args, &syntax_args, false), - cx, - ) - .await - } - }; - println!("{}", result.unwrap()); - } - }, - Command::ConvertExample { - path, - output_format, - } => { - let example = NamedExample::load(path).unwrap(); - example.write(output_format, io::stdout()).unwrap(); - } - }; - - let _ = cx.update(|cx| cx.quit()); - }) - .detach(); - }); -} diff --git a/crates/zeta_cli/src/paths.rs b/crates/zeta_cli/src/paths.rs deleted file mode 100644 index 144bf6f5dd97c518d965d7bd23da83ce7f11f66f..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/paths.rs +++ /dev/null @@ -1,8 +0,0 @@ -use std::{env, path::PathBuf, sync::LazyLock}; - -static TARGET_DIR: LazyLock = LazyLock::new(|| env::current_dir().unwrap().join("target")); -pub static CACHE_DIR: LazyLock = - LazyLock::new(|| TARGET_DIR.join("zeta-llm-response-cache")); -pub static REPOS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-repos")); -pub static WORKTREES_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees")); -pub static LOGS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-logs")); diff --git a/crates/zeta_cli/src/predict.rs b/crates/zeta_cli/src/predict.rs deleted file mode 100644 index 4efc82fa8a7c5d5cf6773a7f771d12dd89b4e1ed..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/predict.rs +++ /dev/null @@ -1,411 +0,0 @@ -use crate::PromptFormat; -use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample}; -use crate::headless::ZetaCliAppState; -use crate::paths::{CACHE_DIR, LOGS_DIR}; -use ::serde::Serialize; -use anyhow::{Result, anyhow}; -use clap::Args; -use collections::HashMap; -use gpui::http_client::Url; -use language::{Anchor, Buffer, Point}; -// use cloud_llm_client::predict_edits_v3::PromptFormat; -use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock}; -use futures::StreamExt as _; -use gpui::{AppContext, AsyncApp, Entity}; -use project::Project; -use serde::Deserialize; -use std::cell::Cell; -use std::fs; -use std::io::Write; -use std::ops::Range; -use std::path::PathBuf; -use std::sync::Arc; -use std::sync::Mutex; -use std::time::{Duration, Instant}; -use zeta2::LlmResponseCache; - -#[derive(Debug, Args)] -pub struct PredictArguments { - #[arg(long, value_enum, default_value_t = PromptFormat::default())] - 
prompt_format: PromptFormat, - #[arg(long)] - use_expected_context: bool, - #[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)] - format: PredictionsOutputFormat, - example_path: PathBuf, - #[clap(long)] - skip_cache: bool, -} - -#[derive(clap::ValueEnum, Debug, Clone)] -pub enum PredictionsOutputFormat { - Json, - Md, - Diff, -} - -pub async fn run_zeta2_predict( - args: PredictArguments, - app_state: &Arc, - cx: &mut AsyncApp, -) { - let example = NamedExample::load(args.example_path).unwrap(); - let result = zeta2_predict( - example, - args.skip_cache, - args.prompt_format, - args.use_expected_context, - &app_state, - cx, - ) - .await - .unwrap(); - result.write(args.format, std::io::stdout()).unwrap(); -} - -thread_local! { - static AUTHENTICATED: Cell = const { Cell::new(false) }; -} - -pub async fn zeta2_predict( - example: NamedExample, - skip_cache: bool, - prompt_format: PromptFormat, - use_expected_context: bool, - app_state: &Arc, - cx: &mut AsyncApp, -) -> Result { - fs::create_dir_all(&*LOGS_DIR)?; - let worktree_path = example.setup_worktree().await?; - - if !AUTHENTICATED.get() { - AUTHENTICATED.set(true); - - app_state - .client - .sign_in_with_optional_connect(true, cx) - .await?; - } - - let project = cx.update(|cx| { - Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ) - })?; - - let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?; - - let worktree = project - .update(cx, |project, cx| { - project.create_worktree(&worktree_path, true, cx) - })? - .await?; - worktree - .read_with(cx, |worktree, _cx| { - worktree.as_local().unwrap().scan_complete() - })? - .await; - - let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?; - - zeta.update(cx, |zeta, _cx| { - zeta.with_llm_response_cache(Arc::new(Cache { skip_cache })); - })?; - - cx.subscribe(&buffer_store, { - let project = project.clone(); - move |_, event, cx| match event { - project::buffer_store::BufferStoreEvent::BufferAdded(buffer) => { - zeta2::Zeta::try_global(cx) - .unwrap() - .update(cx, |zeta, cx| zeta.register_buffer(&buffer, &project, cx)); - } - _ => {} - } - })? 
- .detach(); - - let _edited_buffers = example.apply_edit_history(&project, cx).await?; - let (cursor_buffer, cursor_anchor) = example.cursor_position(&project, cx).await?; - - let result = Arc::new(Mutex::new(PredictionDetails::default())); - let mut debug_rx = zeta.update(cx, |zeta, _| zeta.debug_info())?; - - let debug_task = cx.background_spawn({ - let result = result.clone(); - async move { - let mut start_time = None; - let mut search_queries_generated_at = None; - let mut search_queries_executed_at = None; - while let Some(event) = debug_rx.next().await { - match event { - zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => { - start_time = Some(info.timestamp); - fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?; - } - zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => { - search_queries_generated_at = Some(info.timestamp); - fs::write( - LOGS_DIR.join("search_queries.json"), - serde_json::to_string_pretty(&info.search_queries).unwrap(), - )?; - } - zeta2::ZetaDebugInfo::SearchQueriesExecuted(info) => { - search_queries_executed_at = Some(info.timestamp); - } - zeta2::ZetaDebugInfo::ContextRetrievalFinished(_info) => {} - zeta2::ZetaDebugInfo::EditPredictionRequested(request) => { - let prediction_started_at = Instant::now(); - start_time.get_or_insert(prediction_started_at); - fs::write( - LOGS_DIR.join("prediction_prompt.md"), - &request.local_prompt.unwrap_or_default(), - )?; - - { - let mut result = result.lock().unwrap(); - - for included_file in request.request.included_files { - let insertions = - vec![(request.request.cursor_point, CURSOR_MARKER)]; - result.excerpts.extend(included_file.excerpts.iter().map( - |excerpt| ActualExcerpt { - path: included_file.path.components().skip(1).collect(), - text: String::from(excerpt.text.as_ref()), - }, - )); - write_codeblock( - &included_file.path, - included_file.excerpts.iter(), - if included_file.path == request.request.excerpt_path { - &insertions - } else { - &[] - }, - included_file.max_row, - false, - &mut result.excerpts_text, - ); - } - } - - let response = request.response_rx.await?.0.map_err(|err| anyhow!(err))?; - let response = zeta2::text_from_response(response).unwrap_or_default(); - let prediction_finished_at = Instant::now(); - fs::write(LOGS_DIR.join("prediction_response.md"), &response)?; - - let mut result = result.lock().unwrap(); - - if !use_expected_context { - result.planning_search_time = - Some(search_queries_generated_at.unwrap() - start_time.unwrap()); - result.running_search_time = Some( - search_queries_executed_at.unwrap() - - search_queries_generated_at.unwrap(), - ); - } - result.prediction_time = prediction_finished_at - prediction_started_at; - result.total_time = prediction_finished_at - start_time.unwrap(); - - break; - } - } - } - anyhow::Ok(()) - } - }); - - zeta.update(cx, |zeta, _cx| { - let mut options = zeta.options().clone(); - options.prompt_format = prompt_format.into(); - zeta.set_options(options); - })?; - - if use_expected_context { - let context_excerpts_tasks = example - .example - .expected_context - .iter() - .flat_map(|section| { - section.alternatives[0].excerpts.iter().map(|excerpt| { - resolve_context_entry(project.clone(), excerpt.clone(), cx.clone()) - }) - }) - .collect::>(); - let context_excerpts_vec = futures::future::try_join_all(context_excerpts_tasks).await?; - - let mut context_excerpts = HashMap::default(); - for (buffer, mut excerpts) in context_excerpts_vec { - context_excerpts - .entry(buffer) - .or_insert(Vec::new()) - .append(&mut 
excerpts); - } - - zeta.update(cx, |zeta, _cx| { - zeta.set_context(project.clone(), context_excerpts) - })?; - } else { - zeta.update(cx, |zeta, cx| { - zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx) - })? - .await?; - } - - let prediction = zeta - .update(cx, |zeta, cx| { - zeta.request_prediction(&project, &cursor_buffer, cursor_anchor, cx) - })? - .await?; - - debug_task.await?; - - let mut result = Arc::into_inner(result).unwrap().into_inner().unwrap(); - result.diff = prediction - .map(|prediction| { - let old_text = prediction.snapshot.text(); - let new_text = prediction - .buffer - .update(cx, |buffer, cx| { - buffer.edit(prediction.edits.iter().cloned(), None, cx); - buffer.text() - }) - .unwrap(); - language::unified_diff(&old_text, &new_text) - }) - .unwrap_or_default(); - - anyhow::Ok(result) -} - -async fn resolve_context_entry( - project: Entity, - excerpt: ExpectedExcerpt, - mut cx: AsyncApp, -) -> Result<(Entity, Vec>)> { - let buffer = project - .update(&mut cx, |project, cx| { - let project_path = project.find_project_path(&excerpt.path, cx).unwrap(); - project.open_buffer(project_path, cx) - })? - .await?; - - let ranges = buffer.read_with(&mut cx, |buffer, _| { - let full_text = buffer.text(); - let offset = full_text - .find(&excerpt.text) - .expect("Expected context not found"); - let point = buffer.offset_to_point(offset); - excerpt - .required_lines - .iter() - .map(|line| { - let row = point.row + line.0; - let range = Point::new(row, 0)..Point::new(row + 1, 0); - buffer.anchor_after(range.start)..buffer.anchor_before(range.end) - }) - .collect() - })?; - - Ok((buffer, ranges)) -} - -struct Cache { - skip_cache: bool, -} - -impl Cache { - fn path(key: u64) -> PathBuf { - CACHE_DIR.join(format!("{key:x}.json")) - } -} - -impl LlmResponseCache for Cache { - fn get_key(&self, url: &Url, body: &str) -> u64 { - use collections::FxHasher; - use std::hash::{Hash, Hasher}; - - let mut hasher = FxHasher::default(); - url.hash(&mut hasher); - body.hash(&mut hasher); - hasher.finish() - } - - fn read_response(&self, key: u64) -> Option { - let path = Cache::path(key); - if path.exists() { - if self.skip_cache { - log::info!("Skipping existing cached LLM response: {}", path.display()); - None - } else { - log::info!("Using LLM response from cache: {}", path.display()); - Some(fs::read_to_string(path).unwrap()) - } - } else { - None - } - } - - fn write_response(&self, key: u64, value: &str) { - fs::create_dir_all(&*CACHE_DIR).unwrap(); - - let path = Cache::path(key); - log::info!("Writing LLM response to cache: {}", path.display()); - fs::write(path, value).unwrap(); - } -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize)] -pub struct PredictionDetails { - pub diff: String, - pub excerpts: Vec, - pub excerpts_text: String, // TODO: contains the worktree root path. Drop this field and compute it on the fly - pub planning_search_time: Option, - pub running_search_time: Option, - pub prediction_time: Duration, - pub total_time: Duration, -} - -impl PredictionDetails { - pub fn write(&self, format: PredictionsOutputFormat, mut out: impl Write) -> Result<()> { - let formatted = match format { - PredictionsOutputFormat::Md => self.to_markdown(), - PredictionsOutputFormat::Json => serde_json::to_string_pretty(self)?, - PredictionsOutputFormat::Diff => self.diff.clone(), - }; - - Ok(out.write_all(formatted.as_bytes())?) 
- } - - pub fn to_markdown(&self) -> String { - let inference_time = self.planning_search_time.unwrap_or_default() + self.prediction_time; - - format!( - "## Excerpts\n\n\ - {}\n\n\ - ## Prediction\n\n\ - {}\n\n\ - ## Time\n\n\ - Planning searches: {}ms\n\ - Running searches: {}ms\n\ - Making Prediction: {}ms\n\n\ - -------------------\n\n\ - Total: {}ms\n\ - Inference: {}ms ({:.2}%)\n", - self.excerpts_text, - self.diff, - self.planning_search_time.unwrap_or_default().as_millis(), - self.running_search_time.unwrap_or_default().as_millis(), - self.prediction_time.as_millis(), - self.total_time.as_millis(), - inference_time.as_millis(), - (inference_time.as_millis() as f64 / self.total_time.as_millis() as f64) * 100. - ) - } -} diff --git a/crates/zeta_cli/src/source_location.rs b/crates/zeta_cli/src/source_location.rs deleted file mode 100644 index 3438675e78ac4d8bba6f58f7ce8a9016aed6c0c7..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/source_location.rs +++ /dev/null @@ -1,70 +0,0 @@ -use std::{fmt, fmt::Display, path::Path, str::FromStr, sync::Arc}; - -use ::util::{paths::PathStyle, rel_path::RelPath}; -use anyhow::{Result, anyhow}; -use language::Point; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -#[derive(Debug, Clone, Hash, Eq, PartialEq)] -pub struct SourceLocation { - pub path: Arc, - pub point: Point, -} - -impl Serialize for SourceLocation { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'de> Deserialize<'de> for SourceLocation { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - s.parse().map_err(serde::de::Error::custom) - } -} - -impl Display for SourceLocation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "{}:{}:{}", - self.path.display(PathStyle::Posix), - self.point.row + 1, - self.point.column + 1 - ) - } -} - -impl FromStr for SourceLocation { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let parts: Vec<&str> = s.split(':').collect(); - if parts.len() != 3 { - return Err(anyhow!( - "Invalid source location. 
Expected 'file.rs:line:column', got '{}'", - s - )); - } - - let path = RelPath::new(Path::new(&parts[0]), PathStyle::local())?.into_arc(); - let line: u32 = parts[1] - .parse() - .map_err(|_| anyhow!("Invalid line number: '{}'", parts[1]))?; - let column: u32 = parts[2] - .parse() - .map_err(|_| anyhow!("Invalid column number: '{}'", parts[2]))?; - - // Convert from 1-based to 0-based indexing - let point = Point::new(line.saturating_sub(1), column.saturating_sub(1)); - - Ok(SourceLocation { path, point }) - } -} diff --git a/crates/zeta_cli/src/syntax_retrieval_stats.rs b/crates/zeta_cli/src/syntax_retrieval_stats.rs deleted file mode 100644 index f2634b1323d92b7136c591627226161b2905a955..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/syntax_retrieval_stats.rs +++ /dev/null @@ -1,1260 +0,0 @@ -use ::util::rel_path::RelPath; -use ::util::{RangeExt, ResultExt as _}; -use anyhow::{Context as _, Result}; -use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents; -use edit_prediction_context::{ - Declaration, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions, Identifier, - Imports, Reference, ReferenceRegion, SyntaxIndex, SyntaxIndexState, references_in_range, -}; -use futures::StreamExt as _; -use futures::channel::mpsc; -use gpui::Entity; -use gpui::{AppContext, AsyncApp}; -use language::OffsetRangeExt; -use language::{BufferSnapshot, Point}; -use ordered_float::OrderedFloat; -use polars::prelude::*; -use project::{Project, ProjectEntryId, ProjectPath, Worktree}; -use serde::{Deserialize, Serialize}; -use std::fs; -use std::{ - cmp::Reverse, - collections::{HashMap, HashSet}, - fs::File, - hash::{Hash, Hasher}, - io::{BufRead, BufReader, BufWriter, Write as _}, - ops::Range, - path::{Path, PathBuf}, - sync::{ - Arc, - atomic::{self, AtomicUsize}, - }, - time::Duration, -}; -use util::paths::PathStyle; -use zeta2::ContextMode; - -use crate::headless::ZetaCliAppState; -use crate::source_location::SourceLocation; -use crate::util::{open_buffer, open_buffer_with_language_server}; - -pub async fn retrieval_stats( - worktree: PathBuf, - app_state: Arc, - only_extension: Option, - file_limit: Option, - skip_files: Option, - options: zeta2::ZetaOptions, - cx: &mut AsyncApp, -) -> Result { - let ContextMode::Syntax(context_options) = options.context.clone() else { - anyhow::bail!("retrieval stats only works in ContextMode::Syntax"); - }; - - let options = Arc::new(options); - let worktree_path = worktree.canonicalize()?; - - let project = cx.update(|cx| { - Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ) - })?; - - let worktree = project - .update(cx, |project, cx| { - project.create_worktree(&worktree_path, true, cx) - })? - .await?; - - // wait for worktree scan so that wait_for_initial_file_indexing waits for the whole worktree. - worktree - .read_with(cx, |worktree, _cx| { - worktree.as_local().unwrap().scan_complete() - })? - .await; - - let index = cx.new(|cx| SyntaxIndex::new(&project, options.file_indexing_parallelism, cx))?; - index - .read_with(cx, |index, cx| index.wait_for_initial_file_indexing(cx))? - .await?; - let indexed_files = index - .read_with(cx, |index, cx| index.indexed_file_paths(cx))? 
- .await; - let mut filtered_files = indexed_files - .into_iter() - .filter(|project_path| { - let file_extension = project_path.path.extension(); - if let Some(only_extension) = only_extension.as_ref() { - file_extension.is_some_and(|extension| extension == only_extension) - } else { - file_extension - .is_some_and(|extension| !["md", "json", "sh", "diff"].contains(&extension)) - } - }) - .collect::>(); - filtered_files.sort_by(|a, b| a.path.cmp(&b.path)); - - let index_state = index.read_with(cx, |index, _cx| index.state().clone())?; - cx.update(|_| { - drop(index); - })?; - let index_state = Arc::new( - Arc::into_inner(index_state) - .context("Index state had more than 1 reference")? - .into_inner(), - ); - - struct FileSnapshot { - project_entry_id: ProjectEntryId, - snapshot: BufferSnapshot, - hash: u64, - parent_abs_path: Arc, - } - - let files: Vec = futures::future::try_join_all({ - filtered_files - .iter() - .map(|file| { - let buffer_task = - open_buffer(project.clone(), worktree.clone(), file.path.clone(), cx); - cx.spawn(async move |cx| { - let buffer = buffer_task.await?; - let (project_entry_id, parent_abs_path, snapshot) = - buffer.read_with(cx, |buffer, cx| { - let file = project::File::from_dyn(buffer.file()).unwrap(); - let project_entry_id = file.project_entry_id().unwrap(); - let mut parent_abs_path = file.worktree.read(cx).absolutize(&file.path); - if !parent_abs_path.pop() { - panic!("Invalid worktree path"); - } - - (project_entry_id, parent_abs_path, buffer.snapshot()) - })?; - - anyhow::Ok( - cx.background_spawn(async move { - let mut hasher = collections::FxHasher::default(); - snapshot.text().hash(&mut hasher); - FileSnapshot { - project_entry_id, - snapshot, - hash: hasher.finish(), - parent_abs_path: parent_abs_path.into(), - } - }) - .await, - ) - }) - }) - .collect::>() - }) - .await?; - - let mut file_snapshots = HashMap::default(); - let mut hasher = collections::FxHasher::default(); - for FileSnapshot { - project_entry_id, - snapshot, - hash, - .. - } in &files - { - file_snapshots.insert(*project_entry_id, snapshot.clone()); - hash.hash(&mut hasher); - } - let files_hash = hasher.finish(); - let file_snapshots = Arc::new(file_snapshots); - let target_cli_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../target/zeta_cli"); - fs::create_dir_all(&target_cli_dir).unwrap(); - let target_cli_dir = target_cli_dir.canonicalize().unwrap(); - - let lsp_cache_dir = target_cli_dir.join("cache"); - fs::create_dir_all(&lsp_cache_dir).unwrap(); - - let lsp_definitions_path = lsp_cache_dir.join(format!( - "{}-{:x}.jsonl", - worktree_path.file_stem().unwrap_or_default().display(), - files_hash - )); - - let mut lsp_definitions = HashMap::default(); - let mut lsp_files = 0; - - if fs::exists(&lsp_definitions_path)? { - log::info!( - "Using cached LSP definitions from {}", - lsp_definitions_path.display() - ); - - let file = File::options() - .read(true) - .write(true) - .open(&lsp_definitions_path)?; - let lines = BufReader::new(&file).lines(); - let mut valid_len: usize = 0; - - for (line, expected_file) in lines.zip(files.iter()) { - let line = line?; - let FileLspDefinitions { path, references } = match serde_json::from_str(&line) { - Ok(ok) => ok, - Err(_) => { - log::error!("Found invalid cache line. 
Truncating to #{lsp_files}.",); - file.set_len(valid_len as u64)?; - break; - } - }; - let expected_path = expected_file.snapshot.file().unwrap().path().as_unix_str(); - if expected_path != path.as_ref() { - log::error!( - "Expected file #{} to be {expected_path}, but found {path}. Truncating to #{lsp_files}.", - lsp_files + 1 - ); - file.set_len(valid_len as u64)?; - break; - } - for (point, ranges) in references { - let Ok(path) = RelPath::new(Path::new(path.as_ref()), PathStyle::Posix) else { - log::warn!("Invalid path: {}", path); - continue; - }; - lsp_definitions.insert( - SourceLocation { - path: path.into_arc(), - point: point.into(), - }, - ranges, - ); - } - lsp_files += 1; - valid_len += line.len() + 1 - } - } - - if lsp_files < files.len() { - if lsp_files == 0 { - log::warn!( - "No LSP definitions found, populating {}", - lsp_definitions_path.display() - ); - } else { - log::warn!("{} files missing from LSP cache", files.len() - lsp_files); - } - - gather_lsp_definitions( - &lsp_definitions_path, - lsp_files, - &filtered_files, - &worktree, - &project, - &mut lsp_definitions, - cx, - ) - .await?; - } - let files_len = files.len().min(file_limit.unwrap_or(usize::MAX)); - let done_count = Arc::new(AtomicUsize::new(0)); - - let (output_tx, output_rx) = mpsc::unbounded::(); - - let tasks = files - .into_iter() - .skip(skip_files.unwrap_or(0)) - .take(file_limit.unwrap_or(usize::MAX)) - .map(|project_file| { - let index_state = index_state.clone(); - let lsp_definitions = lsp_definitions.clone(); - let output_tx = output_tx.clone(); - let done_count = done_count.clone(); - let file_snapshots = file_snapshots.clone(); - let context_options = context_options.clone(); - cx.background_spawn(async move { - let snapshot = project_file.snapshot; - - let full_range = 0..snapshot.len(); - let references = references_in_range( - full_range, - &snapshot.text(), - ReferenceRegion::Nearby, - &snapshot, - ); - - let imports = if context_options.use_imports { - Imports::gather(&snapshot, Some(&project_file.parent_abs_path)) - } else { - Imports::default() - }; - - let path = snapshot.file().unwrap().path(); - - for reference in references { - let query_point = snapshot.offset_to_point(reference.range.start); - let source_location = SourceLocation { - path: path.clone(), - point: query_point, - }; - let lsp_definitions = lsp_definitions - .get(&source_location) - .cloned() - .unwrap_or_else(|| { - log::warn!( - "No definitions found for source location: {:?}", - source_location - ); - Vec::new() - }); - - let retrieve_result = retrieve_definitions( - &reference, - &imports, - query_point, - &snapshot, - &index_state, - &file_snapshots, - &context_options, - ) - .await?; - - let result = ReferenceRetrievalResult { - cursor_path: path.clone(), - identifier: reference.identifier, - cursor_point: query_point, - lsp_definitions, - retrieved_definitions: retrieve_result.definitions, - excerpt_range: retrieve_result.excerpt_range, - }; - - output_tx.unbounded_send(result).ok(); - } - - println!( - "{:02}/{:02} done", - done_count.fetch_add(1, atomic::Ordering::Relaxed) + 1, - files_len, - ); - - anyhow::Ok(()) - }) - }) - .collect::>(); - - drop(output_tx); - - let df_task = cx.background_spawn(build_dataframe(output_rx)); - - futures::future::try_join_all(tasks).await?; - let mut df = df_task.await?; - - let run_id = format!( - "{}-{}", - worktree_path.file_stem().unwrap_or_default().display(), - chrono::Local::now().format("%Y%m%d_%H%M%S") - ); - let run_dir = target_cli_dir.join(run_id); - 
fs::create_dir(&run_dir).unwrap(); - - let parquet_path = run_dir.join("stats.parquet"); - let mut parquet_file = fs::File::create(&parquet_path)?; - - ParquetWriter::new(&mut parquet_file) - .finish(&mut df) - .unwrap(); - - let stats = SummaryStats::from_dataframe(df)?; - - let stats_path = run_dir.join("stats.txt"); - fs::write(&stats_path, format!("{}", stats))?; - - println!("{}", stats); - println!("\nWrote:"); - println!("- {}", relativize_path(&parquet_path).display()); - println!("- {}", relativize_path(&stats_path).display()); - println!("- {}", relativize_path(&lsp_definitions_path).display()); - - Ok("".to_string()) -} - -async fn build_dataframe( - mut output_rx: mpsc::UnboundedReceiver, -) -> Result { - use soa_rs::{Soa, Soars}; - - #[derive(Default, Soars)] - struct Row { - ref_id: u32, - cursor_path: String, - cursor_row: u32, - cursor_column: u32, - cursor_identifier: String, - gold_in_excerpt: bool, - gold_path: String, - gold_row: u32, - gold_column: u32, - gold_is_external: bool, - candidate_count: u32, - candidate_path: Option, - candidate_row: Option, - candidate_column: Option, - candidate_is_gold: Option, - candidate_rank: Option, - candidate_is_same_file: Option, - candidate_is_referenced_nearby: Option, - candidate_is_referenced_in_breadcrumb: Option, - candidate_reference_count: Option, - candidate_same_file_declaration_count: Option, - candidate_declaration_count: Option, - candidate_reference_line_distance: Option, - candidate_declaration_line_distance: Option, - candidate_excerpt_vs_item_jaccard: Option, - candidate_excerpt_vs_signature_jaccard: Option, - candidate_adjacent_vs_item_jaccard: Option, - candidate_adjacent_vs_signature_jaccard: Option, - candidate_excerpt_vs_item_weighted_overlap: Option, - candidate_excerpt_vs_signature_weighted_overlap: Option, - candidate_adjacent_vs_item_weighted_overlap: Option, - candidate_adjacent_vs_signature_weighted_overlap: Option, - candidate_path_import_match_count: Option, - candidate_wildcard_path_import_match_count: Option, - candidate_import_similarity: Option, - candidate_max_import_similarity: Option, - candidate_normalized_import_similarity: Option, - candidate_wildcard_import_similarity: Option, - candidate_normalized_wildcard_import_similarity: Option, - candidate_included_by_others: Option, - candidate_includes_others: Option, - } - let mut rows = Soa::::new(); - let mut next_ref_id = 0; - - while let Some(result) = output_rx.next().await { - let mut gold_is_external = false; - let mut gold_in_excerpt = false; - let cursor_path = result.cursor_path.as_unix_str(); - let cursor_row = result.cursor_point.row + 1; - let cursor_column = result.cursor_point.column + 1; - let cursor_identifier = result.identifier.name.to_string(); - let ref_id = next_ref_id; - next_ref_id += 1; - - for lsp_definition in result.lsp_definitions { - let SourceRange { - path: gold_path, - point_range: gold_point_range, - offset_range: gold_offset_range, - } = lsp_definition; - let lsp_point_range = - SerializablePoint::into_language_point_range(gold_point_range.clone()); - - gold_is_external = gold_is_external - || gold_path.is_absolute() - || gold_path - .components() - .any(|component| component.as_os_str() == "node_modules"); - - gold_in_excerpt = gold_in_excerpt - || result.excerpt_range.as_ref().is_some_and(|excerpt_range| { - excerpt_range.contains_inclusive(&gold_offset_range) - }); - - let gold_row = gold_point_range.start.row; - let gold_column = gold_point_range.start.column; - let candidate_count = 
result.retrieved_definitions.len() as u32; - - for (candidate_rank, retrieved_definition) in - result.retrieved_definitions.iter().enumerate() - { - let candidate_is_gold = gold_path.as_path() - == retrieved_definition.path.as_std_path() - && retrieved_definition - .range - .contains_inclusive(&lsp_point_range); - - let candidate_row = retrieved_definition.range.start.row + 1; - let candidate_column = retrieved_definition.range.start.column + 1; - - let DeclarationScoreComponents { - is_same_file, - is_referenced_nearby, - is_referenced_in_breadcrumb, - reference_count, - same_file_declaration_count, - declaration_count, - reference_line_distance, - declaration_line_distance, - excerpt_vs_item_jaccard, - excerpt_vs_signature_jaccard, - adjacent_vs_item_jaccard, - adjacent_vs_signature_jaccard, - excerpt_vs_item_weighted_overlap, - excerpt_vs_signature_weighted_overlap, - adjacent_vs_item_weighted_overlap, - adjacent_vs_signature_weighted_overlap, - path_import_match_count, - wildcard_path_import_match_count, - import_similarity, - max_import_similarity, - normalized_import_similarity, - wildcard_import_similarity, - normalized_wildcard_import_similarity, - included_by_others, - includes_others, - } = retrieved_definition.components; - - rows.push(Row { - ref_id, - cursor_path: cursor_path.to_string(), - cursor_row, - cursor_column, - cursor_identifier: cursor_identifier.clone(), - gold_in_excerpt, - gold_path: gold_path.to_string_lossy().to_string(), - gold_row, - gold_column, - gold_is_external, - candidate_count, - candidate_path: Some(retrieved_definition.path.as_unix_str().to_string()), - candidate_row: Some(candidate_row), - candidate_column: Some(candidate_column), - candidate_is_gold: Some(candidate_is_gold), - candidate_rank: Some(candidate_rank as u32), - candidate_is_same_file: Some(is_same_file), - candidate_is_referenced_nearby: Some(is_referenced_nearby), - candidate_is_referenced_in_breadcrumb: Some(is_referenced_in_breadcrumb), - candidate_reference_count: Some(reference_count as u32), - candidate_same_file_declaration_count: Some(same_file_declaration_count as u32), - candidate_declaration_count: Some(declaration_count as u32), - candidate_reference_line_distance: Some(reference_line_distance), - candidate_declaration_line_distance: Some(declaration_line_distance), - candidate_excerpt_vs_item_jaccard: Some(excerpt_vs_item_jaccard), - candidate_excerpt_vs_signature_jaccard: Some(excerpt_vs_signature_jaccard), - candidate_adjacent_vs_item_jaccard: Some(adjacent_vs_item_jaccard), - candidate_adjacent_vs_signature_jaccard: Some(adjacent_vs_signature_jaccard), - candidate_excerpt_vs_item_weighted_overlap: Some( - excerpt_vs_item_weighted_overlap, - ), - candidate_excerpt_vs_signature_weighted_overlap: Some( - excerpt_vs_signature_weighted_overlap, - ), - candidate_adjacent_vs_item_weighted_overlap: Some( - adjacent_vs_item_weighted_overlap, - ), - candidate_adjacent_vs_signature_weighted_overlap: Some( - adjacent_vs_signature_weighted_overlap, - ), - candidate_path_import_match_count: Some(path_import_match_count as u32), - candidate_wildcard_path_import_match_count: Some( - wildcard_path_import_match_count as u32, - ), - candidate_import_similarity: Some(import_similarity), - candidate_max_import_similarity: Some(max_import_similarity), - candidate_normalized_import_similarity: Some(normalized_import_similarity), - candidate_wildcard_import_similarity: Some(wildcard_import_similarity), - candidate_normalized_wildcard_import_similarity: Some( - 
normalized_wildcard_import_similarity, - ), - candidate_included_by_others: Some(included_by_others as u32), - candidate_includes_others: Some(includes_others as u32), - }); - } - - if result.retrieved_definitions.is_empty() { - rows.push(Row { - ref_id, - cursor_path: cursor_path.to_string(), - cursor_row, - cursor_column, - cursor_identifier: cursor_identifier.clone(), - gold_in_excerpt, - gold_path: gold_path.to_string_lossy().to_string(), - gold_row, - gold_column, - gold_is_external, - candidate_count, - ..Default::default() - }); - } - } - } - let slices = rows.slices(); - - let RowSlices { - ref_id, - cursor_path, - cursor_row, - cursor_column, - cursor_identifier, - gold_in_excerpt, - gold_path, - gold_row, - gold_column, - gold_is_external, - candidate_path, - candidate_row, - candidate_column, - candidate_is_gold, - candidate_rank, - candidate_count, - candidate_is_same_file, - candidate_is_referenced_nearby, - candidate_is_referenced_in_breadcrumb, - candidate_reference_count, - candidate_same_file_declaration_count, - candidate_declaration_count, - candidate_reference_line_distance, - candidate_declaration_line_distance, - candidate_excerpt_vs_item_jaccard, - candidate_excerpt_vs_signature_jaccard, - candidate_adjacent_vs_item_jaccard, - candidate_adjacent_vs_signature_jaccard, - candidate_excerpt_vs_item_weighted_overlap, - candidate_excerpt_vs_signature_weighted_overlap, - candidate_adjacent_vs_item_weighted_overlap, - candidate_adjacent_vs_signature_weighted_overlap, - candidate_path_import_match_count, - candidate_wildcard_path_import_match_count, - candidate_import_similarity, - candidate_max_import_similarity, - candidate_normalized_import_similarity, - candidate_wildcard_import_similarity, - candidate_normalized_wildcard_import_similarity, - candidate_included_by_others, - candidate_includes_others, - } = slices; - - let df = DataFrame::new(vec![ - Series::new(PlSmallStr::from_str("ref_id"), ref_id).into(), - Series::new(PlSmallStr::from_str("cursor_path"), cursor_path).into(), - Series::new(PlSmallStr::from_str("cursor_row"), cursor_row).into(), - Series::new(PlSmallStr::from_str("cursor_column"), cursor_column).into(), - Series::new(PlSmallStr::from_str("cursor_identifier"), cursor_identifier).into(), - Series::new(PlSmallStr::from_str("gold_in_excerpt"), gold_in_excerpt).into(), - Series::new(PlSmallStr::from_str("gold_path"), gold_path).into(), - Series::new(PlSmallStr::from_str("gold_row"), gold_row).into(), - Series::new(PlSmallStr::from_str("gold_column"), gold_column).into(), - Series::new(PlSmallStr::from_str("gold_is_external"), gold_is_external).into(), - Series::new(PlSmallStr::from_str("candidate_count"), candidate_count).into(), - Series::new(PlSmallStr::from_str("candidate_path"), candidate_path).into(), - Series::new(PlSmallStr::from_str("candidate_row"), candidate_row).into(), - Series::new(PlSmallStr::from_str("candidate_column"), candidate_column).into(), - Series::new(PlSmallStr::from_str("candidate_is_gold"), candidate_is_gold).into(), - Series::new(PlSmallStr::from_str("candidate_rank"), candidate_rank).into(), - Series::new( - PlSmallStr::from_str("candidate_is_same_file"), - candidate_is_same_file, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_is_referenced_nearby"), - candidate_is_referenced_nearby, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_is_referenced_in_breadcrumb"), - candidate_is_referenced_in_breadcrumb, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_reference_count"), - 
candidate_reference_count, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_same_file_declaration_count"), - candidate_same_file_declaration_count, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_declaration_count"), - candidate_declaration_count, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_reference_line_distance"), - candidate_reference_line_distance, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_declaration_line_distance"), - candidate_declaration_line_distance, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_excerpt_vs_item_jaccard"), - candidate_excerpt_vs_item_jaccard, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_excerpt_vs_signature_jaccard"), - candidate_excerpt_vs_signature_jaccard, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_adjacent_vs_item_jaccard"), - candidate_adjacent_vs_item_jaccard, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_adjacent_vs_signature_jaccard"), - candidate_adjacent_vs_signature_jaccard, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_excerpt_vs_item_weighted_overlap"), - candidate_excerpt_vs_item_weighted_overlap, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_excerpt_vs_signature_weighted_overlap"), - candidate_excerpt_vs_signature_weighted_overlap, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_adjacent_vs_item_weighted_overlap"), - candidate_adjacent_vs_item_weighted_overlap, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_adjacent_vs_signature_weighted_overlap"), - candidate_adjacent_vs_signature_weighted_overlap, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_path_import_match_count"), - candidate_path_import_match_count, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_wildcard_path_import_match_count"), - candidate_wildcard_path_import_match_count, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_import_similarity"), - candidate_import_similarity, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_max_import_similarity"), - candidate_max_import_similarity, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_normalized_import_similarity"), - candidate_normalized_import_similarity, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_wildcard_import_similarity"), - candidate_wildcard_import_similarity, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_normalized_wildcard_import_similarity"), - candidate_normalized_wildcard_import_similarity, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_included_by_others"), - candidate_included_by_others, - ) - .into(), - Series::new( - PlSmallStr::from_str("candidate_includes_others"), - candidate_includes_others, - ) - .into(), - ])?; - - Ok(df) -} - -fn relativize_path(path: &Path) -> &Path { - path.strip_prefix(std::env::current_dir().unwrap()) - .unwrap_or(path) -} - -struct SummaryStats { - references_count: u32, - retrieved_count: u32, - top_match_count: u32, - non_top_match_count: u32, - ranking_involved_top_match_count: u32, - missing_none_retrieved: u32, - missing_wrong_retrieval: u32, - missing_external: u32, - in_excerpt_count: u32, -} - -impl SummaryStats { - fn from_dataframe(df: DataFrame) -> Result { - // TODO: use lazy more - let unique_refs = - df.unique::<(), ()>(Some(&["ref_id".into()]), UniqueKeepStrategy::Any, None)?; - let references_count = unique_refs.height() as u32; - - let 
gold_mask = df.column("candidate_is_gold")?.bool()?; - let gold_df = df.filter(&gold_mask)?; - let retrieved_count = gold_df.height() as u32; - - let top_match_mask = gold_df.column("candidate_rank")?.u32()?.equal(0); - let top_match_df = gold_df.filter(&top_match_mask)?; - let top_match_count = top_match_df.height() as u32; - - let ranking_involved_top_match_count = top_match_df - .column("candidate_count")? - .u32()? - .gt(1) - .sum() - .unwrap_or_default(); - - let non_top_match_count = (!top_match_mask).sum().unwrap_or(0); - - let not_retrieved_df = df - .lazy() - .group_by(&[col("ref_id"), col("candidate_count")]) - .agg(&[ - col("candidate_is_gold") - .fill_null(false) - .sum() - .alias("gold_count"), - col("gold_in_excerpt").sum().alias("gold_in_excerpt_count"), - col("gold_is_external") - .sum() - .alias("gold_is_external_count"), - ]) - .filter(col("gold_count").eq(lit(0))) - .collect()?; - - let in_excerpt_mask = not_retrieved_df - .column("gold_in_excerpt_count")? - .u32()? - .gt(0); - let in_excerpt_count = in_excerpt_mask.sum().unwrap_or(0); - - let missing_df = not_retrieved_df.filter(&!in_excerpt_mask)?; - - let missing_none_retrieved_mask = missing_df.column("candidate_count")?.u32()?.equal(0); - let missing_none_retrieved = missing_none_retrieved_mask.sum().unwrap_or(0); - let external_mask = missing_df.column("gold_is_external_count")?.u32()?.gt(0); - let missing_external = (missing_none_retrieved_mask & external_mask) - .sum() - .unwrap_or(0); - - let missing_wrong_retrieval = missing_df - .column("candidate_count")? - .u32()? - .gt(0) - .sum() - .unwrap_or(0); - - Ok(SummaryStats { - references_count, - retrieved_count, - top_match_count, - non_top_match_count, - ranking_involved_top_match_count, - missing_none_retrieved, - missing_wrong_retrieval, - missing_external, - in_excerpt_count, - }) - } - - fn count_and_percentage(part: u32, total: u32) -> String { - format!("{} ({:.2}%)", part, (part as f64 / total as f64) * 100.0) - } -} - -impl std::fmt::Display for SummaryStats { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let included = self.in_excerpt_count + self.retrieved_count; - let missing = self.references_count - included; - writeln!(f)?; - writeln!(f, "╮ references: {}", self.references_count)?; - writeln!( - f, - "├─╮ included: {}", - Self::count_and_percentage(included, self.references_count), - )?; - writeln!( - f, - "│ ├─╮ retrieved: {}", - Self::count_and_percentage(self.retrieved_count, self.references_count) - )?; - writeln!( - f, - "│ │ ├─╮ top match : {}", - Self::count_and_percentage(self.top_match_count, self.retrieved_count) - )?; - writeln!( - f, - "│ │ │ ╰─╴ involving ranking: {}", - Self::count_and_percentage(self.ranking_involved_top_match_count, self.top_match_count) - )?; - writeln!( - f, - "│ │ ╰─╴ non-top match: {}", - Self::count_and_percentage(self.non_top_match_count, self.retrieved_count) - )?; - writeln!( - f, - "│ ╰─╴ in excerpt: {}", - Self::count_and_percentage(self.in_excerpt_count, included) - )?; - writeln!( - f, - "╰─╮ missing: {}", - Self::count_and_percentage(missing, self.references_count) - )?; - writeln!( - f, - " ├─╮ none retrieved: {}", - Self::count_and_percentage(self.missing_none_retrieved, missing) - )?; - writeln!( - f, - " │ ╰─╴ external (expected): {}", - Self::count_and_percentage(self.missing_external, missing) - )?; - writeln!( - f, - " ╰─╴ wrong retrieval: {}", - Self::count_and_percentage(self.missing_wrong_retrieval, missing) - )?; - Ok(()) - } -} - -#[derive(Debug)] -struct 
ReferenceRetrievalResult { - cursor_path: Arc, - cursor_point: Point, - identifier: Identifier, - excerpt_range: Option>, - lsp_definitions: Vec, - retrieved_definitions: Vec, -} - -#[derive(Debug)] -struct RetrievedDefinition { - path: Arc, - range: Range, - score: f32, - #[allow(dead_code)] - retrieval_score: f32, - #[allow(dead_code)] - components: DeclarationScoreComponents, -} - -struct RetrieveResult { - definitions: Vec, - excerpt_range: Option>, -} - -async fn retrieve_definitions( - reference: &Reference, - imports: &Imports, - query_point: Point, - snapshot: &BufferSnapshot, - index: &Arc, - file_snapshots: &Arc>, - context_options: &EditPredictionContextOptions, -) -> Result { - let mut single_reference_map = HashMap::default(); - single_reference_map.insert(reference.identifier.clone(), vec![reference.clone()]); - let edit_prediction_context = EditPredictionContext::gather_context_with_references_fn( - query_point, - snapshot, - imports, - &context_options, - Some(&index), - |_, _, _| single_reference_map, - ); - - let Some(edit_prediction_context) = edit_prediction_context else { - return Ok(RetrieveResult { - definitions: Vec::new(), - excerpt_range: None, - }); - }; - - let mut retrieved_definitions = Vec::new(); - for scored_declaration in edit_prediction_context.declarations { - match &scored_declaration.declaration { - Declaration::File { - project_entry_id, - declaration, - .. - } => { - let Some(snapshot) = file_snapshots.get(&project_entry_id) else { - log::error!("bug: file project entry not found"); - continue; - }; - let path = snapshot.file().unwrap().path().clone(); - retrieved_definitions.push(RetrievedDefinition { - path, - range: snapshot.offset_to_point(declaration.item_range.start) - ..snapshot.offset_to_point(declaration.item_range.end), - score: scored_declaration.score(DeclarationStyle::Declaration), - retrieval_score: scored_declaration.retrieval_score(), - components: scored_declaration.components, - }); - } - Declaration::Buffer { - project_entry_id, - rope, - declaration, - .. - } => { - let Some(snapshot) = file_snapshots.get(&project_entry_id) else { - // This case happens when dependency buffers have been opened by - // go-to-definition, resulting in single-file worktrees. - continue; - }; - let path = snapshot.file().unwrap().path().clone(); - retrieved_definitions.push(RetrievedDefinition { - path, - range: rope.offset_to_point(declaration.item_range.start) - ..rope.offset_to_point(declaration.item_range.end), - score: scored_declaration.score(DeclarationStyle::Declaration), - retrieval_score: scored_declaration.retrieval_score(), - components: scored_declaration.components, - }); - } - } - } - retrieved_definitions.sort_by_key(|definition| Reverse(OrderedFloat(definition.score))); - - Ok(RetrieveResult { - definitions: retrieved_definitions, - excerpt_range: Some(edit_prediction_context.excerpt.range), - }) -} - -async fn gather_lsp_definitions( - lsp_definitions_path: &Path, - start_index: usize, - files: &[ProjectPath], - worktree: &Entity, - project: &Entity, - definitions: &mut HashMap>, - cx: &mut AsyncApp, -) -> Result<()> { - let worktree_id = worktree.read_with(cx, |worktree, _cx| worktree.id())?; - - let lsp_store = project.read_with(cx, |project, _cx| project.lsp_store())?; - cx.subscribe(&lsp_store, { - move |_, event, _| { - if let project::LspStoreEvent::LanguageServerUpdate { - message: - client::proto::update_language_server::Variant::WorkProgress( - client::proto::LspWorkProgress { - message: Some(message), - .. - }, - ), - .. 
- } = event - { - println!("⟲ {message}") - } - } - })? - .detach(); - - let (cache_line_tx, mut cache_line_rx) = mpsc::unbounded::(); - - let cache_file = File::options() - .append(true) - .create(true) - .open(lsp_definitions_path) - .unwrap(); - - let cache_task = cx.background_spawn(async move { - let mut writer = BufWriter::new(cache_file); - while let Some(line) = cache_line_rx.next().await { - serde_json::to_writer(&mut writer, &line).unwrap(); - writer.write_all(&[b'\n']).unwrap(); - } - writer.flush().unwrap(); - }); - - let mut error_count = 0; - let mut lsp_open_handles = Vec::new(); - let mut ready_languages = HashSet::default(); - for (file_index, project_path) in files[start_index..].iter().enumerate() { - println!( - "Processing file {} of {}: {}", - start_index + file_index + 1, - files.len(), - project_path.path.display(PathStyle::Posix) - ); - - let Some((lsp_open_handle, language_server_id, buffer)) = open_buffer_with_language_server( - project.clone(), - worktree.clone(), - project_path.path.clone(), - &mut ready_languages, - cx, - ) - .await - .log_err() else { - continue; - }; - lsp_open_handles.push(lsp_open_handle); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - let full_range = 0..snapshot.len(); - let references = references_in_range( - full_range, - &snapshot.text(), - ReferenceRegion::Nearby, - &snapshot, - ); - - loop { - let is_ready = lsp_store - .read_with(cx, |lsp_store, _cx| { - lsp_store - .language_server_statuses - .get(&language_server_id) - .is_some_and(|status| status.pending_work.is_empty()) - }) - .unwrap(); - if is_ready { - break; - } - cx.background_executor() - .timer(Duration::from_millis(10)) - .await; - } - - let mut cache_line_references = Vec::with_capacity(references.len()); - - for reference in references { - // TODO: Rename declaration to definition in edit_prediction_context? - let lsp_result = project - .update(cx, |project, cx| { - project.definitions(&buffer, reference.range.start, cx) - })? 
- .await; - - match lsp_result { - Ok(lsp_definitions) => { - let mut targets = Vec::new(); - for target in lsp_definitions.unwrap_or_default() { - let buffer = target.target.buffer; - let anchor_range = target.target.range; - buffer.read_with(cx, |buffer, cx| { - let Some(file) = project::File::from_dyn(buffer.file()) else { - return; - }; - let file_worktree = file.worktree.read(cx); - let file_worktree_id = file_worktree.id(); - // Relative paths for worktree files, absolute for all others - let path = if worktree_id != file_worktree_id { - file.worktree.read(cx).absolutize(&file.path) - } else { - file.path.as_std_path().to_path_buf() - }; - let offset_range = anchor_range.to_offset(&buffer); - let point_range = SerializablePoint::from_language_point_range( - offset_range.to_point(&buffer), - ); - targets.push(SourceRange { - path, - offset_range, - point_range, - }); - })?; - } - - let point = snapshot.offset_to_point(reference.range.start); - - cache_line_references.push((point.into(), targets.clone())); - definitions.insert( - SourceLocation { - path: project_path.path.clone(), - point, - }, - targets, - ); - } - Err(err) => { - log::error!("Language server error: {err}"); - error_count += 1; - } - } - } - - cache_line_tx - .unbounded_send(FileLspDefinitions { - path: project_path.path.as_unix_str().into(), - references: cache_line_references, - }) - .log_err(); - } - - drop(cache_line_tx); - - if error_count > 0 { - log::error!("Encountered {} language server errors", error_count); - } - - cache_task.await; - - Ok(()) -} - -#[derive(Serialize, Deserialize)] -struct FileLspDefinitions { - path: Arc, - references: Vec<(SerializablePoint, Vec)>, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct SourceRange { - path: PathBuf, - point_range: Range, - offset_range: Range, -} - -/// Serializes to 1-based row and column indices. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SerializablePoint { - pub row: u32, - pub column: u32, -} - -impl SerializablePoint { - pub fn into_language_point_range(range: Range) -> Range { - range.start.into()..range.end.into() - } - - pub fn from_language_point_range(range: Range) -> Range { - range.start.into()..range.end.into() - } -} - -impl From for SerializablePoint { - fn from(point: Point) -> Self { - SerializablePoint { - row: point.row + 1, - column: point.column + 1, - } - } -} - -impl From for Point { - fn from(serializable: SerializablePoint) -> Self { - Point { - row: serializable.row.saturating_sub(1), - column: serializable.column.saturating_sub(1), - } - } -} diff --git a/crates/zeta_cli/src/util.rs b/crates/zeta_cli/src/util.rs deleted file mode 100644 index 699c1c743f67e09ef5ca7211c385114802d4ab32..0000000000000000000000000000000000000000 --- a/crates/zeta_cli/src/util.rs +++ /dev/null @@ -1,186 +0,0 @@ -use anyhow::{Result, anyhow}; -use futures::channel::mpsc; -use futures::{FutureExt as _, StreamExt as _}; -use gpui::{AsyncApp, Entity, Task}; -use language::{Buffer, LanguageId, LanguageServerId, ParseStatus}; -use project::{Project, ProjectPath, Worktree}; -use std::collections::HashSet; -use std::sync::Arc; -use std::time::Duration; -use util::rel_path::RelPath; - -pub fn open_buffer( - project: Entity, - worktree: Entity, - path: Arc, - cx: &AsyncApp, -) -> Task>> { - cx.spawn(async move |cx| { - let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath { - worktree_id: worktree.id(), - path, - })?; - - let buffer = project - .update(cx, |project, cx| project.open_buffer(project_path, cx))? 
- .await?; - - let mut parse_status = buffer.read_with(cx, |buffer, _cx| buffer.parse_status())?; - while *parse_status.borrow() != ParseStatus::Idle { - parse_status.changed().await?; - } - - Ok(buffer) - }) -} - -pub async fn open_buffer_with_language_server( - project: Entity, - worktree: Entity, - path: Arc, - ready_languages: &mut HashSet, - cx: &mut AsyncApp, -) -> Result<(Entity>, LanguageServerId, Entity)> { - let buffer = open_buffer(project.clone(), worktree, path.clone(), cx).await?; - - let (lsp_open_handle, path_style) = project.update(cx, |project, cx| { - ( - project.register_buffer_with_language_servers(&buffer, cx), - project.path_style(cx), - ) - })?; - - let Some(language_id) = buffer.read_with(cx, |buffer, _cx| { - buffer.language().map(|language| language.id()) - })? - else { - return Err(anyhow!("No language for {}", path.display(path_style))); - }; - - let log_prefix = path.display(path_style); - if !ready_languages.contains(&language_id) { - wait_for_lang_server(&project, &buffer, log_prefix.into_owned(), cx).await?; - ready_languages.insert(language_id); - } - - let lsp_store = project.read_with(cx, |project, _cx| project.lsp_store())?; - - // hacky wait for buffer to be registered with the language server - for _ in 0..100 { - let Some(language_server_id) = lsp_store.update(cx, |lsp_store, cx| { - buffer.update(cx, |buffer, cx| { - lsp_store - .language_servers_for_local_buffer(&buffer, cx) - .next() - .map(|(_, language_server)| language_server.server_id()) - }) - })? - else { - cx.background_executor() - .timer(Duration::from_millis(10)) - .await; - continue; - }; - - return Ok((lsp_open_handle, language_server_id, buffer)); - } - - return Err(anyhow!("No language server found for buffer")); -} - -// TODO: Dedupe with similar function in crates/eval/src/instance.rs -pub fn wait_for_lang_server( - project: &Entity, - buffer: &Entity, - log_prefix: String, - cx: &mut AsyncApp, -) -> Task> { - println!("{}⏵ Waiting for language server", log_prefix); - - let (mut tx, mut rx) = mpsc::channel(1); - - let lsp_store = project - .read_with(cx, |project, _| project.lsp_store()) - .unwrap(); - - let has_lang_server = buffer - .update(cx, |buffer, cx| { - lsp_store.update(cx, |lsp_store, cx| { - lsp_store - .language_servers_for_local_buffer(buffer, cx) - .next() - .is_some() - }) - }) - .unwrap_or(false); - - if has_lang_server { - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) - .unwrap() - .detach(); - } - let (mut added_tx, mut added_rx) = mpsc::channel(1); - - let subscriptions = [ - cx.subscribe(&lsp_store, { - let log_prefix = log_prefix.clone(); - move |_, event, _| { - if let project::LspStoreEvent::LanguageServerUpdate { - message: - client::proto::update_language_server::Variant::WorkProgress( - client::proto::LspWorkProgress { - message: Some(message), - .. - }, - ), - .. - } = event - { - println!("{}⟲ {message}", log_prefix) - } - } - }), - cx.subscribe(project, { - let buffer = buffer.clone(); - move |project, event, cx| match event { - project::Event::LanguageServerAdded(_, _, _) => { - let buffer = buffer.clone(); - project - .update(cx, |project, cx| project.save_buffer(buffer, cx)) - .detach(); - added_tx.try_send(()).ok(); - } - project::Event::DiskBasedDiagnosticsFinished { .. } => { - tx.try_send(()).ok(); - } - _ => {} - } - }), - ]; - - cx.spawn(async move |cx| { - if !has_lang_server { - // some buffers never have a language server, so this aborts quickly in that case. 
- let timeout = cx.background_executor().timer(Duration::from_secs(5)); - futures::select! { - _ = added_rx.next() => {}, - _ = timeout.fuse() => { - anyhow::bail!("Waiting for language server add timed out after 5 seconds"); - } - }; - } - let timeout = cx.background_executor().timer(Duration::from_secs(60 * 5)); - let result = futures::select! { - _ = rx.next() => { - println!("{}⚑ Language server idle", log_prefix); - anyhow::Ok(()) - }, - _ = timeout.fuse() => { - anyhow::bail!("LSP wait timed out after 5 minutes"); - } - }; - drop(subscriptions); - result - }) -} diff --git a/crates/zeta_prompt/Cargo.toml b/crates/zeta_prompt/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..c9b1e2d784d10ea2fd278f70ffdae2ef0981fce0 --- /dev/null +++ b/crates/zeta_prompt/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "zeta_prompt" +version = "0.1.0" +publish.workspace = true +edition.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/zeta_prompt.rs" + +[dependencies] +serde.workspace = true \ No newline at end of file diff --git a/crates/zeta_prompt/LICENSE-GPL b/crates/zeta_prompt/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/zeta_prompt/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs new file mode 100644 index 0000000000000000000000000000000000000000..21fbca1ae10b715d0c11a31dc9390aada03fa157 --- /dev/null +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -0,0 +1,165 @@ +use serde::{Deserialize, Serialize}; +use std::fmt::Write; +use std::ops::Range; +use std::path::Path; +use std::sync::Arc; + +pub const CURSOR_MARKER: &str = "<|user_cursor|>"; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ZetaPromptInput { + pub cursor_path: Arc, + pub cursor_excerpt: Arc, + pub editable_range_in_excerpt: Range, + pub cursor_offset_in_excerpt: usize, + pub events: Vec>, + pub related_files: Arc<[RelatedFile]>, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(tag = "event")] +pub enum Event { + BufferChange { + path: Arc, + old_path: Arc, + diff: String, + predicted: bool, + in_open_source_repo: bool, + }, +} + +pub fn write_event(prompt: &mut String, event: &Event) { + fn write_path_as_unix_str(prompt: &mut String, path: &Path) { + for component in path.components() { + prompt.push('/'); + write!(prompt, "{}", component.as_os_str().display()).ok(); + } + } + match event { + Event::BufferChange { + path, + old_path, + diff, + predicted, + in_open_source_repo: _, + } => { + if *predicted { + prompt.push_str("// User accepted prediction:\n"); + } + prompt.push_str("--- a"); + write_path_as_unix_str(prompt, old_path.as_ref()); + prompt.push_str("\n+++ b"); + write_path_as_unix_str(prompt, path.as_ref()); + prompt.push('\n'); + prompt.push_str(diff); + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RelatedFile { + pub path: Arc, + pub max_row: u32, + pub excerpts: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RelatedExcerpt { + pub row_range: Range, + pub text: String, +} + +pub fn format_zeta_prompt(input: &ZetaPromptInput) -> String { + let mut prompt = String::new(); + write_related_files(&mut prompt, &input.related_files); + write_edit_history_section(&mut prompt, input); + write_cursor_excerpt_section(&mut prompt, input); + prompt +} + +pub fn 
write_related_files(prompt: &mut String, related_files: &[RelatedFile]) { + push_delimited(prompt, "related_files", &[], |prompt| { + for file in related_files { + let path_str = file.path.to_string_lossy(); + push_delimited(prompt, "related_file", &[("path", &path_str)], |prompt| { + for excerpt in &file.excerpts { + push_delimited( + prompt, + "related_excerpt", + &[( + "lines", + &format!( + "{}-{}", + excerpt.row_range.start + 1, + excerpt.row_range.end + 1 + ), + )], + |prompt| { + prompt.push_str(&excerpt.text); + prompt.push('\n'); + }, + ); + } + }); + } + }); +} + +fn write_edit_history_section(prompt: &mut String, input: &ZetaPromptInput) { + push_delimited(prompt, "edit_history", &[], |prompt| { + if input.events.is_empty() { + prompt.push_str("(No edit history)"); + } else { + for event in &input.events { + write_event(prompt, event); + } + } + }); +} + +fn write_cursor_excerpt_section(prompt: &mut String, input: &ZetaPromptInput) { + push_delimited(prompt, "cursor_excerpt", &[], |prompt| { + let path_str = input.cursor_path.to_string_lossy(); + push_delimited(prompt, "file", &[("path", &path_str)], |prompt| { + prompt.push_str(&input.cursor_excerpt[..input.editable_range_in_excerpt.start]); + push_delimited(prompt, "editable_region", &[], |prompt| { + prompt.push_str( + &input.cursor_excerpt + [input.editable_range_in_excerpt.start..input.cursor_offset_in_excerpt], + ); + prompt.push_str(CURSOR_MARKER); + prompt.push_str( + &input.cursor_excerpt + [input.cursor_offset_in_excerpt..input.editable_range_in_excerpt.end], + ); + }); + prompt.push_str(&input.cursor_excerpt[input.editable_range_in_excerpt.end..]); + }); + }); +} + +fn push_delimited( + prompt: &mut String, + tag: &'static str, + arguments: &[(&str, &str)], + cb: impl FnOnce(&mut String), +) { + if !prompt.ends_with("\n") { + prompt.push('\n'); + } + prompt.push('<'); + prompt.push_str(tag); + for (arg_name, arg_value) in arguments { + write!(prompt, " {}=\"{}\"", arg_name, arg_value).ok(); + } + prompt.push_str(">\n"); + + cb(prompt); + + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + prompt.push_str("\n"); +} diff --git a/crates/zlog/README.md b/crates/zlog/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6d0fef147cb0fb300e5a4cfd3936a97d0ee111fc --- /dev/null +++ b/crates/zlog/README.md @@ -0,0 +1,15 @@ +# Zlog + +Use the `ZED_LOG` environment variable to control logging output for Zed +applications and libraries. The variable accepts a comma-separated list of +directives that specify logging levels for different modules (crates). The +general format is for instance: + +``` +ZED_LOG=info,project=debug,agent=off +``` + +- Levels can be one of: `off`/`none`, `error`, `warn`, `info`, `debug`, or + `trace`. +- You don't need to specify the global level, default is `trace` in the crate + and `info` set by `RUST_LOG` in Zed. 
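For context on the new `zeta_prompt` crate added above, here is a minimal, hypothetical usage sketch of `format_zeta_prompt`. The element types behind `Arc` and the range fields are assumptions (paths as `Arc<Path>`, excerpt text as `Arc<str>`, byte offsets for the editable range, row numbers for excerpts), so treat it as an illustration of the call shape rather than the crate's exact API:

```rust
use std::path::Path;
use std::sync::Arc;

use zeta_prompt::{RelatedExcerpt, RelatedFile, ZetaPromptInput, format_zeta_prompt};

fn main() {
    let excerpt = "fn main() {\n    \n}\n";
    let input = ZetaPromptInput {
        // Paths and excerpt text are assumed to be `Arc<Path>` / `Arc<str>`.
        cursor_path: Arc::from(Path::new("src/main.rs")),
        cursor_excerpt: Arc::from(excerpt),
        // The whole excerpt is editable; the cursor sits on the blank line.
        editable_range_in_excerpt: 0..excerpt.len(),
        cursor_offset_in_excerpt: 16,
        // With no events, the prompt will contain "(No edit history)".
        events: Vec::new(),
        related_files: Arc::from(vec![RelatedFile {
            path: Arc::from(Path::new("src/lib.rs")),
            max_row: 3,
            excerpts: vec![RelatedExcerpt {
                row_range: 0..1,
                text: "pub fn add(a: u32, b: u32) -> u32 { a + b }".to_string(),
            }],
        }]),
    };

    println!("{}", format_zeta_prompt(&input));
}
```

The resulting string contains `<related_files>`, `<edit_history>`, and `<cursor_excerpt>` sections, with the cursor position marked by `<|user_cursor|>` inside the `<editable_region>`.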
diff --git a/crates/zlog/src/filter.rs b/crates/zlog/src/filter.rs index e2ca04be60f4fe7eba7cdb2fc9eb983092d2331a..0be6f4ead5bf64aa47f7a60391bf377c9998cfb4 100644 --- a/crates/zlog/src/filter.rs +++ b/crates/zlog/src/filter.rs @@ -5,12 +5,12 @@ use std::sync::{ atomic::{AtomicU8, Ordering}, }; -use crate::{SCOPE_DEPTH_MAX, SCOPE_STRING_SEP_STR, Scope, ScopeAlloc, env_config, private}; +use crate::{SCOPE_DEPTH_MAX, SCOPE_STRING_SEP_STR, ScopeAlloc, ScopeRef, env_config, private}; use log; static ENV_FILTER: OnceLock = OnceLock::new(); -static SCOPE_MAP: RwLock> = RwLock::new(None); +static SCOPE_MAP: RwLock = RwLock::new(ScopeMap::empty()); pub const LEVEL_ENABLED_MAX_DEFAULT: log::LevelFilter = log::LevelFilter::Info; /// The maximum log level of verbosity that is enabled by default. @@ -59,7 +59,11 @@ pub fn is_possibly_enabled_level(level: log::Level) -> bool { level as u8 <= LEVEL_ENABLED_MAX_CONFIG.load(Ordering::Acquire) } -pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Level) -> bool { +pub fn is_scope_enabled( + scope: &ScopeRef<'_>, + module_path: Option<&str>, + level: log::Level, +) -> bool { // TODO: is_always_allowed_level that checks against LEVEL_ENABLED_MIN_CONFIG if !is_possibly_enabled_level(level) { // [FAST PATH] @@ -74,16 +78,11 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le err.into_inner() }); - let Some(map) = global_scope_map.as_ref() else { - // on failure, return false because it's not <= LEVEL_ENABLED_MAX_STATIC - return is_enabled_by_default; - }; - - if map.is_empty() { + if global_scope_map.is_empty() { // if no scopes are enabled, return false because it's not <= LEVEL_ENABLED_MAX_STATIC return is_enabled_by_default; } - let enabled_status = map.is_enabled(scope, module_path, level); + let enabled_status = global_scope_map.is_enabled(scope, module_path, level); match enabled_status { EnabledStatus::NotConfigured => is_enabled_by_default, EnabledStatus::Enabled => true, @@ -107,7 +106,7 @@ pub fn refresh_from_settings(settings: &HashMap) { SCOPE_MAP.clear_poison(); err.into_inner() }); - global_map.replace(map_new); + *global_map = map_new; } log::trace!("Log configuration updated"); } @@ -395,12 +394,21 @@ impl ScopeMap { } EnabledStatus::NotConfigured } + + const fn empty() -> ScopeMap { + ScopeMap { + entries: vec![], + modules: vec![], + root_count: 0, + } + } } #[cfg(test)] mod tests { use log::LevelFilter; + use crate::Scope; use crate::private::scope_new; use super::*; diff --git a/crates/zlog/src/sink.rs b/crates/zlog/src/sink.rs index 303e3139bc7cdb95ae01c7e87fff8f9bc6d100c2..07e87be1b071f2538e716bb8fd2b692527363fc4 100644 --- a/crates/zlog/src/sink.rs +++ b/crates/zlog/src/sink.rs @@ -8,7 +8,7 @@ use std::{ }, }; -use crate::{SCOPE_STRING_SEP_CHAR, Scope}; +use crate::{SCOPE_STRING_SEP_CHAR, ScopeRef}; // ANSI color escape codes for log levels const ANSI_RESET: &str = "\x1b[0m"; @@ -35,7 +35,7 @@ static SINK_FILE_SIZE_BYTES: AtomicU64 = AtomicU64::new(0); const SINK_FILE_SIZE_BYTES_MAX: u64 = 1024 * 1024; // 1 MB pub struct Record<'a> { - pub scope: Scope, + pub scope: ScopeRef<'a>, pub level: log::Level, pub message: &'a std::fmt::Arguments<'a>, pub module_path: Option<&'a str>, @@ -208,7 +208,7 @@ pub fn flush() { } struct SourceFmt<'a> { - scope: Scope, + scope: ScopeRef<'a>, module_path: Option<&'a str>, line: Option, ansi: bool, diff --git a/crates/zlog/src/zlog.rs b/crates/zlog/src/zlog.rs index 04f58e91025b9598038a075c470eb4750d92e4cf..3c154f790845da74dcf3a4f9bfdd830d2d32c9ec 
100644 --- a/crates/zlog/src/zlog.rs +++ b/crates/zlog/src/zlog.rs @@ -70,15 +70,18 @@ impl log::Log for Zlog { if !self.enabled(record.metadata()) { return; } - let (crate_name_scope, module_scope) = match record.module_path_static() { + let module_path = record.module_path().or(record.file()); + let (crate_name_scope, module_scope) = match module_path { Some(module_path) => { let crate_name = private::extract_crate_name_from_module_path(module_path); - let crate_name_scope = private::scope_new(&[crate_name]); - let module_scope = private::scope_new(&[module_path]); + let crate_name_scope = private::scope_ref_new(&[crate_name]); + let module_scope = private::scope_ref_new(&[module_path]); (crate_name_scope, module_scope) } - // TODO: when do we hit this - None => (private::scope_new(&[]), private::scope_new(&["*unknown*"])), + None => { + // TODO: when do we hit this + (private::scope_new(&[]), private::scope_new(&["*unknown*"])) + } }; let level = record.metadata().level(); if !filter::is_scope_enabled(&crate_name_scope, Some(record.target()), level) { @@ -89,7 +92,7 @@ impl log::Log for Zlog { level, message: record.args(), // PERF(batching): store non-static paths in a cache + leak them and pass static str here - module_path: record.module_path().or(record.file()), + module_path, line: record.line(), }); } @@ -183,7 +186,7 @@ macro_rules! time { $crate::Timer::new($logger, $name) }; ($name:expr) => { - time!($crate::default_logger!() => $name) + $crate::time!($crate::default_logger!() => $name) }; } @@ -252,6 +255,10 @@ pub mod private { } pub const fn scope_new(scopes: &[&'static str]) -> Scope { + scope_ref_new(scopes) + } + + pub const fn scope_ref_new<'a>(scopes: &[&'a str]) -> ScopeRef<'a> { assert!(scopes.len() <= SCOPE_DEPTH_MAX); let mut scope = [""; SCOPE_DEPTH_MAX]; let mut i = 0; @@ -275,6 +282,7 @@ pub mod private { } pub type Scope = [&'static str; SCOPE_DEPTH_MAX]; +pub type ScopeRef<'a> = [&'a str; SCOPE_DEPTH_MAX]; pub type ScopeAlloc = [String; SCOPE_DEPTH_MAX]; const SCOPE_STRING_SEP_STR: &str = "."; const SCOPE_STRING_SEP_CHAR: char = '.'; diff --git a/crates/ztracing/Cargo.toml b/crates/ztracing/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..0d9f15b9afccca4a1a05036c013562c8ad1ae8f4 --- /dev/null +++ b/crates/ztracing/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "ztracing" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +tracy = ["tracing-tracy"] + +[dependencies] +zlog.workspace = true +tracing.workspace = true + +tracing-subscriber = "0.3.22" +tracing-tracy = { version = "0.11.4", optional = true, features = ["enable", "ondemand"] } + +ztracing_macro.workspace = true diff --git a/crates/ztracing/LICENSE-AGPL b/crates/ztracing/LICENSE-AGPL new file mode 120000 index 0000000000000000000000000000000000000000..5f5cf25dc458e75f4050c7378c186fca9b68fd19 --- /dev/null +++ b/crates/ztracing/LICENSE-AGPL @@ -0,0 +1 @@ +../../LICENSE-AGPL \ No newline at end of file diff --git a/crates/semantic_version/LICENSE-APACHE b/crates/ztracing/LICENSE-APACHE similarity index 100% rename from crates/semantic_version/LICENSE-APACHE rename to crates/ztracing/LICENSE-APACHE diff --git a/crates/ztracing/LICENSE-GPL b/crates/ztracing/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/ztracing/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file 
diff --git a/crates/ztracing/build.rs b/crates/ztracing/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..dc0d0ad704d49c4c0ab639d769024330e10d2481 --- /dev/null +++ b/crates/ztracing/build.rs @@ -0,0 +1,9 @@ +use std::env; + +fn main() { + if env::var_os("ZTRACING").is_some() { + println!(r"cargo::rustc-cfg=ztracing"); + } + println!("cargo::rerun-if-changed=build.rs"); + println!("cargo::rerun-if-env-changed=ZTRACING"); +} diff --git a/crates/ztracing/src/lib.rs b/crates/ztracing/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..c9007be1ed43150ef877d51c882aee77845e5bd6 --- /dev/null +++ b/crates/ztracing/src/lib.rs @@ -0,0 +1,58 @@ +pub use tracing::{Level, field}; + +#[cfg(ztracing)] +pub use tracing::{ + Span, debug_span, error_span, event, info_span, instrument, span, trace_span, warn_span, +}; +#[cfg(not(ztracing))] +pub use ztracing_macro::instrument; + +#[cfg(not(ztracing))] +pub use __consume_all_tokens as trace_span; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as info_span; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as debug_span; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as warn_span; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as error_span; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as event; +#[cfg(not(ztracing))] +pub use __consume_all_tokens as span; + +#[cfg(not(ztracing))] +#[macro_export] +macro_rules! __consume_all_tokens { + ($($t:tt)*) => { + $crate::Span + }; +} + +#[cfg(not(ztracing))] +pub struct Span; + +#[cfg(not(ztracing))] +impl Span { + pub fn current() -> Self { + Self + } + + pub fn enter(&self) {} + + pub fn record(&self, _t: T, _s: S) {} +} + +#[cfg(ztracing)] +pub fn init() { + zlog::info!("Starting tracy subscriber, you can now connect the profiler"); + use tracing_subscriber::prelude::*; + tracing::subscriber::set_global_default( + tracing_subscriber::registry().with(tracing_tracy::TracyLayer::default()), + ) + .expect("setup tracy layer"); +} + +#[cfg(not(ztracing))] +pub fn init() {} diff --git a/crates/ztracing_macro/Cargo.toml b/crates/ztracing_macro/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..dbd7adce5fccd054c3dc87acaf1283e9e7c36889 --- /dev/null +++ b/crates/ztracing_macro/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "ztracing_macro" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lib] +proc-macro = true + +[dependencies] diff --git a/crates/ztracing_macro/LICENSE-AGPL b/crates/ztracing_macro/LICENSE-AGPL new file mode 120000 index 0000000000000000000000000000000000000000..5f5cf25dc458e75f4050c7378c186fca9b68fd19 --- /dev/null +++ b/crates/ztracing_macro/LICENSE-AGPL @@ -0,0 +1 @@ +../../LICENSE-AGPL \ No newline at end of file diff --git a/crates/ztracing_macro/LICENSE-APACHE b/crates/ztracing_macro/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/crates/ztracing_macro/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/ztracing_macro/LICENSE-GPL b/crates/ztracing_macro/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/ztracing_macro/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ztracing_macro/src/lib.rs b/crates/ztracing_macro/src/lib.rs new file mode 100644 index 
0000000000000000000000000000000000000000..d9b073ed130bdc829e4d5d943b6d4b6a6d802888 --- /dev/null +++ b/crates/ztracing_macro/src/lib.rs @@ -0,0 +1,7 @@ +#[proc_macro_attribute] +pub fn instrument( + _attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + item +} diff --git a/docs/.rules b/docs/.rules new file mode 100644 index 0000000000000000000000000000000000000000..4e6ca312f13b12a54a73d736ffeed8a8e09061ef --- /dev/null +++ b/docs/.rules @@ -0,0 +1,158 @@ +# Zed Documentation Guidelines + +## Voice and Tone + +### Core Principles + +- **Practical over promotional**: Focus on what users can do, not on selling Zed. Avoid marketing language like "powerful," "revolutionary," or "best-in-class." +- **Honest about limitations**: When Zed lacks a feature or doesn't match another tool's depth, say so directly. Pair limitations with workarounds or alternative workflows. +- **Direct and concise**: Use short sentences. Get to the point. Developers are scanning, not reading novels. +- **Second person**: Address the reader as "you." Avoid "the user" or "one." +- **Present tense**: "Zed opens the file" not "Zed will open the file." + +### What to Avoid + +- Superlatives without substance ("incredibly fast," "seamlessly integrated") +- Hedging language ("simply," "just," "easily")—if something is simple, the instructions will show it +- Apologetic tone for missing features—state the limitation and move on +- Comparisons that disparage other tools—be factual, not competitive +- Meta-commentary about honesty ("the honest take is...", "to be frank...", "honestly...")—let honesty show through frank assessments, not announcements +- LLM-isms and filler words ("entirely," "certainly,", "deeply," "definitely," "actually")—these add nothing + +## Content Structure + +### Page Organization + +1. **Start with the goal**: Open with what the reader will accomplish, not background +2. **Front-load the action**: Put the most common task first, edge cases later +3. **Use headers liberally**: Readers scan; headers help them find what they need +4. **End with "what's next"**: Link to related docs or logical next steps + +### Section Patterns + +For how-to content: +1. Brief context (1-2 sentences max) +2. Steps or instructions +3. Example (code block or screenshot reference) +4. Tips or gotchas (if any) + +For reference content: +1. What it is (definition) +2. How to access/configure it +3. Options/parameters table +4. 
Examples + +## Formatting Conventions + +### Keybindings + +- Use backticks for key combinations: `Cmd+Shift+P` +- Show both macOS and Linux/Windows when they differ: `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +- Use `+` to join simultaneous keys, space for sequences: `Cmd+K Cmd+C` + +### Code and Settings + +- Inline code for setting names, file paths, commands: `format_on_save`, `.zed/settings.json`, `zed .` +- Code blocks for JSON config, multi-line commands, or file contents +- Always show complete, working examples—not fragments + +### Terminal Commands + +Use `sh` code blocks for terminal commands, not plain backticks: + +```sh +brew install zed-editor/zed/zed +``` + +Not: +``` +brew install zed-editor/zed/zed +``` + +For single inline commands in prose, backticks are fine: `zed .` + +### Tables + +Use tables for: +- Keybinding comparisons between editors +- Settings mappings (e.g., VS Code → Zed) +- Feature comparisons with clear columns + +Format: +``` +| Action | Shortcut | Notes | +| --- | --- | --- | +| Open File | `Cmd+O` | Works from any context | +``` + +### Tips and Notes + +Use blockquote format with bold label: +``` +> **Tip:** Practical advice that helps bridge gaps or saves time. +``` + +Reserve tips for genuinely useful information, not padding. + +## Writing Guidelines + +### Settings Documentation + +- **Settings Editor first**: Show how to find and change settings in the UI before showing JSON +- **JSON as secondary**: Present JSON examples as "Or add this to your settings.json" for users who prefer direct editing +- **Complete examples**: Include the full JSON structure, not just the value + +### Migration Guides + +- **Jobs to be done**: Frame around tasks ("How do I search files?") not features ("File Search Feature") +- **Acknowledge the source**: Respect that users have muscle memory and preferences from their previous editor +- **Keybindings tables**: Essential for migration docs—show what maps, what's different, what's missing +- **Trade-offs section**: Be explicit about what the user gains and loses in the switch + +### Feature Documentation + +- **Start with the default**: Document the out-of-box experience first +- **Configuration options**: Group related settings together +- **Cross-link generously**: Link to related features, settings reference, and relevant guides + +## Terminology + +| Use | Instead of | +| --- | --- | +| folder | directory (in user-facing text) | +| project | workspace (Zed doesn't have workspaces) | +| Settings Editor | settings UI, preferences | +| command palette | command bar, action search | +| language server | LSP (spell out first use, then LSP is fine) | +| panel | tool window, sidebar (be specific: "Project Panel," "Terminal Panel") | + +## Examples + +### Good: Direct and actionable +``` +To format on save, open the Settings Editor (`Cmd+,`) and search for `format_on_save`. Set it to `on`. + +Or add this to your settings.json: +{ + "format_on_save": "on" +} +``` + +### Bad: Wordy and promotional +``` +Zed provides a powerful and seamless formatting experience. Simply navigate to the settings and you'll find the format_on_save option which enables Zed's incredible auto-formatting capabilities. +``` + +### Good: Honest about limitations +``` +Zed doesn't index your project like IntelliJ does. You open a folder and start working immediately—no waiting. The trade-off: cross-project analysis relies on language servers, which may not go as deep. 
+ +**How to adapt:** +- Use `Cmd+Shift+F` for project-wide text search +- Use `Cmd+O` for symbol search (powered by your language server) +``` + +### Bad: Defensive or dismissive +``` +While some users might miss indexing, Zed's approach is actually better because it's faster. +``` diff --git a/docs/README.md b/docs/README.md index d843e8d096867bf67466180aabb1af19549ffe79..e1649f4bc99e1668352a46ee2071dcfe1775f4a7 100644 --- a/docs/README.md +++ b/docs/README.md @@ -20,10 +20,9 @@ cd docs && pnpm dlx prettier@3.5.0 . --write && cd .. ## Preprocessor -We have a custom mdbook preprocessor for interfacing with our crates (`crates/docs_preprocessor`). +We have a custom mdBook preprocessor for interfacing with our crates (`crates/docs_preprocessor`). -If for some reason you need to bypass the docs preprocessor, you can comment out `[preprocessor.zed_docs_preprocessor] -` from the `book.toml`.: +If for some reason you need to bypass the docs preprocessor, you can comment out `[preprocessor.zed_docs_preprocessor]` from the `book.toml`. ## Images and videos @@ -34,7 +33,7 @@ Putting binary assets such as images in the Git repository will bloat the reposi ## Internal notes: - We have a Cloudflare router called `docs-proxy` that intercepts requests to `zed.dev/docs` and forwards them to the "docs" Cloudflare Pages project. -- CI uploads a new version to the Pages project from `.github/workflows/deploy_docs.yml` on every push to `main`. +- The CI uploads a new version to the Cloudflare Pages project from `.github/workflows/deploy_docs.yml` on every push to `main`. ### Table of Contents @@ -46,15 +45,15 @@ Since all this preprocessor does is generate the static assets, we don't need to When referencing keybindings or actions, use the following formats: -### Keybindings: +### Keybindings `{#kb scope::Action}` - e.g., `{#kb zed::OpenSettings}`. -This will output a code element like: `Cmd+,|Ctrl+,`. We then use a client-side plugin to show the actual keybinding based on the user's platform. +This will output a code element like: `Cmd + , | Ctrl + ,`. We then use a client-side plugin to show the actual keybinding based on the user's platform. By using the action name, we can ensure that the keybinding is always up-to-date rather than hardcoding the keybinding. -### Actions: +### Actions `{#action scope::Action}` - e.g., `{#action zed::OpenSettings}`. @@ -62,19 +61,20 @@ This will render a human-readable version of the action name, e.g., "zed: open s ### Creating New Templates -Templates are just functions that modify the source of the docs pages (usually with a regex match & replace). You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates. +Templates are functions that modify the source of the docs pages (usually with a regex match and replace). +You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates. 
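To make that concrete, here is a hypothetical sketch of such a template pass using the `regex` crate; the real `Template` trait and keybinding data live in `crates/docs_preprocessor` and may differ:

```rust
use regex::Regex;

// Hypothetical sketch only; the real Template trait and the keybinding data
// live in crates/docs_preprocessor and may differ from this.
fn expand_keybinding_markers(page: &str, lookup: impl Fn(&str) -> Option<String>) -> String {
    // Matches markers such as `{#kb zed::OpenSettings}`.
    let marker = Regex::new(r"\{#kb ([A-Za-z0-9_]+::[A-Za-z0-9_]+)\}").unwrap();
    marker
        .replace_all(page, |caps: &regex::Captures| {
            let action = &caps[1];
            // `lookup` would return something like "Cmd+,|Ctrl+," so the
            // client-side plugin can show the reader's platform binding.
            match lookup(action) {
                Some(keybinding) => format!("`{keybinding}`"),
                None => format!("`{action}`"),
            }
        })
        .into_owned()
}
```

A template for `{#action scope::Action}` would follow the same pattern, replacing the marker with the humanized action name instead of a keybinding.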
### References -- Template Trait: crates/docs_preprocessor/src/templates.rs -- Example template: crates/docs_preprocessor/src/templates/keybinding.rs -- Client-side plugins: docs/theme/plugins.js +- Template Trait: `crates/docs_preprocessor/src/templates.rs` +- Example template: `crates/docs_preprocessor/src/templates/keybinding.rs` +- Client-side plugins: `docs/theme/plugins.js` ## Postprocessor -A postprocessor is implemented as a sub-command of `docs_preprocessor` that wraps the builtin `html` renderer and applies post-processing to the `html` files, to add support for page-specific title and meta description values. +A postprocessor is implemented as a sub-command of `docs_preprocessor` that wraps the built-in HTML renderer and applies post-processing to the HTML files, to add support for page-specific title and `meta` tag description values. -An example of the syntax can be found in `git.md`, as well as below +An example of the syntax can be found in `git.md`, as well as below: ```md --- @@ -85,7 +85,7 @@ description: A page-specific description # Editor ``` -The above will be transformed into (with non-relevant tags removed) +The above code will be transformed into (with non-relevant tags removed): ```html @@ -97,15 +97,16 @@ The above will be transformed into (with non-relevant tags removed) ``` -If no front-matter is provided, or If one or both keys aren't provided, the title and description will be set based on the `default-title` and `default-description` keys in `book.toml` respectively. +If no front matter is provided, or if one or both keys aren't provided, the `title` and `description` will be set based on the `default-title` and `default-description` keys in `book.toml` respectively. ### Implementation details -Unfortunately, `mdbook` does not support post-processing like it does pre-processing, and only supports defining one description to put in the meta tag per book rather than per file. So in order to apply post-processing (necessary to modify the html head tags) the global book description is set to a marker value `#description#` and the html renderer is replaced with a sub-command of `docs_preprocessor` that wraps the builtin `html` renderer and applies post-processing to the `html` files, replacing the marker value and the `(.*)` with the contents of the front-matter if there is one. +Unfortunately, mdBook does not support post-processing like it does pre-processing, and only supports defining one description to put in the `meta` tag per book rather than per file. +So in order to apply post-processing (necessary to modify the HTML `head` tags) the global book description is set to a marker value `#description#` and the HTML renderer is replaced with a sub-command of `docs_preprocessor` that wraps the built-in HTML renderer and applies post-processing to the HTML files, replacing the marker value and the `(.*)` with the contents of the front matter if there is one. ### Known limitations -The front-matter parsing is extremely simple, which avoids needing to take on an additional dependency, or implement full yaml parsing. +The front matter parsing is extremely simple, which avoids needing to take on an additional dependency, or implement full YAML parsing. - Double quotes and multi-line values are not supported, i.e. Keys and values must be entirely on the same line, with no double quotes around the value. 
@@ -119,7 +120,7 @@ title: Some --- ``` -And neither will: +neither this: ```md --- @@ -127,6 +128,5 @@ title: "Some title" --- ``` -- The front-matter must be at the top of the file, with only white-space preceding it - -- The contents of the title and description will not be html-escaped. They should be simple ascii text with no unicode or emoji characters +- The front matter must be at the top of the file, with only white-space preceding it. +- The contents of the `title` and `description` will not be HTML escaped. They should be simple ASCII text with no unicode or emoji characters. diff --git a/docs/book.toml b/docs/book.toml index 60ddc5ac515cb73f7b0b4f2f8c2c193bdddf228b..2bb57c5c08ea2421aa9b8a2fb47fdc9521d32a39 100644 --- a/docs/book.toml +++ b/docs/book.toml @@ -56,6 +56,10 @@ enable = false "/model-improvement.html" = "/docs/ai/ai-improvement.html" "/ai/temperature.html" = "/docs/ai/agent-settings.html#model-temperature" +# Collaboration +"/channels.html" = "/docs/collaboration/channels.html" +"/collaboration.html" = "/docs/collaboration/overview.html" + # Community "/community/feedback.html" = "/community-links" "/conversations.html" = "/community-links" diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index c57802afa61cde6bc0ae6998c95c7980e7c60d64..1f9c5750ea76b35a2f7f5464b7b6684401108d2b 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -1,6 +1,6 @@ # Summary -# Getting Started +# Welcome - [Getting Started](./getting-started.md) - [Installation](./installation.md) @@ -23,6 +23,9 @@ - [Visual Customization](./visual-customization.md) - [Vim Mode](./vim.md) - [Helix Mode](./helix.md) +- [Privacy and Security](./ai/privacy-and-security.md) + - [Worktree Trust](./worktree-trust.md) + - [AI Improvement](./ai/ai-improvement.md) @@ -34,13 +37,16 @@ - [Command-line Interface](./command-line-interface.md) - [Outline Panel](./outline-panel.md) - [Code Completions](./completions.md) -- [Channels](./channels.md) -- [Collaboration](./collaboration.md) +- [Collaboration](./collaboration/overview.md) + - [Channels](./collaboration/channels.md) + - [Contacts and Private Calls](./collaboration/contacts-and-private-calls.md) - [Git](./git.md) - [Debugger](./debugger.md) - [Diagnostics](./diagnostics.md) - [Tasks](./tasks.md) +- [Tab Switcher](./tab-switcher.md) - [Remote Development](./remote-development.md) +- [Dev Containers](./dev-containers.md) - [Environment Variables](./environment.md) - [REPL](./repl.md) @@ -67,8 +73,6 @@ - [Models](./ai/models.md) - [Plans and Usage](./ai/plans-and-usage.md) - [Billing](./ai/billing.md) -- [Privacy and Security](./ai/privacy-and-security.md) - - [AI Improvement](./ai/ai-improvement.md) # Extensions @@ -84,6 +88,14 @@ - [Agent Server Extensions](./extensions/agent-servers.md) - [MCP Server Extensions](./extensions/mcp-extensions.md) +# Coming From... 
+ +- [VS Code](./migrate/vs-code.md) +- [IntelliJ IDEA](./migrate/intellij.md) +- [PyCharm](./migrate/pycharm.md) +- [WebStorm](./migrate/webstorm.md) +- [RustRover](./migrate/rustrover.md) + # Language Support - [All Languages](./languages.md) @@ -126,6 +138,7 @@ - [Markdown](./languages/markdown.md) - [Nim](./languages/nim.md) - [OCaml](./languages/ocaml.md) +- [OpenTofu](./languages/opentofu.md) - [PHP](./languages/php.md) - [PowerShell](./languages/powershell.md) - [Prisma](./languages/prisma.md) @@ -166,6 +179,7 @@ - [FreeBSD](./development/freebsd.md) - [Local Collaboration](./development/local-collaboration.md) - [Using Debuggers](./development/debuggers.md) + - [Performance](./performance.md) - [Glossary](./development/glossary.md) - [Release Notes](./development/release-notes.md) - [Debugging Crashes](./development/debugging-crashes.md) diff --git a/docs/src/ai/ai-improvement.md b/docs/src/ai/ai-improvement.md index 6d7fe8fdb172afa17f494723bb16b1cc69c9336c..857ca2c0efa14e9a7d465f7998310808e0e5237b 100644 --- a/docs/src/ai/ai-improvement.md +++ b/docs/src/ai/ai-improvement.md @@ -20,13 +20,9 @@ When using upstream services through Zed's hosted models, we require assurances | Provider | No Training Guarantee | Zero-Data Retention (ZDR) | | --------- | ------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | | Anthropic | [Yes](https://www.anthropic.com/legal/commercial-terms) | [Yes](https://privacy.anthropic.com/en/articles/8956058-i-have-a-zero-data-retention-agreement-with-anthropic-what-products-does-it-apply-to) | -| Google | [Yes](https://cloud.google.com/terms/service-terms) | **No**, in flight | +| Google | [Yes](https://cloud.google.com/terms/service-terms) | [Yes](https://cloud.google.com/terms/service-terms), see Service Terms sections 17 and 19h | | OpenAI | [Yes](https://openai.com/enterprise-privacy/) | [Yes](https://platform.openai.com/docs/guides/your-data) | -> Zed's use of Gemini models is currently supported via [Google AI Studio](https://ai.google.dev/aistudio), which **_does not_** support ZDR. We're migrating to [Vertex AI](https://cloud.google.com/vertex-ai?hl=en), which **_does_**, and upon completion of that migration will offer ZDR to all users of Zed's hosted Google/Gemini models. - -> If ZDR from upstream model providers is important to you, _please do not use Gemini models at this time_. Your data will never be used for training purposes by any model providers hosted by Zed, however. - When you use your own API keys or external agents, **Zed does not have control over how your data is used by that service provider.** You should reference your agreement with each service provider to understand what terms and conditions apply. diff --git a/docs/src/ai/billing.md b/docs/src/ai/billing.md index 64ff871ce1b629fad72d4ddd6f9c8f42f2bf92da..788c0c1cf7cb0bfd64bdd83812e1e62bf51abf88 100644 --- a/docs/src/ai/billing.md +++ b/docs/src/ai/billing.md @@ -5,7 +5,7 @@ For invoice-based billing, a Business plan is required. Contact [sales@zed.dev]( ## Billing Information {#settings} -You can access billing information and settings at [zed.dev/account](https://zed.dev/account). +You can access billing information and settings at [dashboard.zed.dev/account](https://dashboard.zed.dev/account). Most of the page embeds information from our invoicing/metering partner, Orb (we're planning on a more native experience soon!). 
## Billing Cycles {#billing-cycles} @@ -28,7 +28,7 @@ If payment of an invoice fails, Zed will block usage of our hosted models until ## Invoice History {#invoice-history} -You can access your invoice history by navigating to [zed.dev/account](https://zed.dev/account) and clicking `Invoice history` within the embedded Orb portal. +You can access your invoice history by navigating to [dashboard.zed.dev/account](https://dashboard.zed.dev/account) and clicking `Invoice history` within the embedded Orb portal. If you require historical Stripe invoices, email [billing-support@zed.dev](mailto:billing-support@zed.dev) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index fd073d558ea9f3a2027c5a705320f8554fef7c78..65a427842cda461806dc79ecf67f3a180afd9763 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -1,19 +1,30 @@ # Edit Prediction -Edit Prediction is Zed's mechanism for predicting the code you want to write through AI. +Edit Prediction is Zed's LLM mechanism for predicting the code you want to write. Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`. -The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../authentication.md#what-features-require-signing-in). -Alternatively, you can also use [other providers](#other-providers) like GitHub Copilot and Codestral. +The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), but you can also use [other providers](#other-providers) like GitHub Copilot, Supermaven, and Codestral. ## Configuring Zeta -Zed's Edit Prediction was initially introduced via a banner on the title bar. -Clicking on it would take you to a modal with a button ("Enable Edit Prediction") that sets `zed` as your `edit_prediction_provider`. +To use Zeta, the only thing you need to do is [to sign in](../authentication.md#what-features-require-signing-in). +After doing that, you should already see predictions as you type on your files. -![Onboarding banner and modal](https://zed.dev/img/edit-prediction/docs.webp) +You can confirm that Zeta is properly configured either by verifying whether you have the following code in your `settings.json`: -But, if you haven't come across the banner, Zed's Edit Prediction is the default edit prediction provider and you should see it right away in your status bar. +```json [settings] +"features": { + "edit_prediction_provider": "zed" +}, +``` + +Or you can also look for a little Z icon in the right of your status bar at the bottom. + +### Pricing and Plans + +From just signing in, while in Zed's free plan, you get 2,000 Zeta-powered edit predictions per month. +But you can get _**unlimited edit predictions**_ by upgrading to [the Pro plan](../ai/plans-and-usage.md). +More information can be found in [Zed's pricing page](https://zed.dev/pricing). ### Switching Modes {#switching-modes} @@ -34,6 +45,8 @@ Or directly via the UI through the status bar menu: ![Edit Prediction status bar menu, with the modes toggle.](https://zed.dev/img/edit-prediction/status-bar-menu.webp) +> Note that edit prediction modes work with any prediction provider. 
+ ### Conflict With Other `tab` Actions {#edit-predictions-conflict} By default, when `tab` would normally perform a different action, Zed requires a modifier key to accept predictions: @@ -45,9 +58,8 @@ In these cases, `alt-tab` is used instead to accept the prediction. When the lan On Linux, `alt-tab` is often used by the window manager for switching windows, so `alt-l` is provided as the default binding for accepting predictions. `tab` and `alt-tab` also work, but aren't displayed by default. -{#action editor::AcceptPartialEditPrediction} ({#kb editor::AcceptPartialEditPrediction}) can be used to accept the current edit prediction up to the next word boundary. - -See the [Configuring GitHub Copilot](#github-copilot) and [Configuring Supermaven](#supermaven) sections below for configuration of other providers. Only text insertions at the current cursor are supported for these providers, whereas the Zeta model provides multiple predictions including deletions. +{#action editor::AcceptNextWordEditPrediction} ({#kb editor::AcceptNextWordEditPrediction}) can be used to accept the current edit prediction up to the next word boundary. +{#action editor::AcceptNextLineEditPrediction} ({#kb editor::AcceptNextLineEditPrediction}) can be used to accept the current edit prediction up to the new line boundary. ## Configuring Edit Prediction Keybindings {#edit-predictions-keybinding} @@ -63,7 +75,8 @@ By default, `tab` is used to accept edit predictions. You can use another keybin } ``` -When there's a [conflict with the `tab` key](#edit-predictions-conflict), Zed uses a different context to accept keybindings (`edit_prediction_conflict`). If you want to use a different one, you can insert this in your keymap: +When there's a [conflict with the `tab` key](#edit-predictions-conflict), Zed uses a different key context to accept keybindings (`edit_prediction_conflict`). +If you want to use a different one, you can insert this in your keymap: ```json [settings] { @@ -76,7 +89,8 @@ When there's a [conflict with the `tab` key](#edit-predictions-conflict), Zed us If your keybinding contains a modifier (`ctrl` in the example above), it will also be used to preview the edit prediction and temporarily hide the language server completion menu. -You can also bind this action to keybind without a modifier. In that case, Zed will use the default modifier (`alt`) to preview the edit prediction. +You can also bind this action to keybind without a modifier. +In that case, Zed will use the default modifier (`alt`) to preview the edit prediction. ```json [settings] { @@ -101,9 +115,26 @@ To maintain the use of the modifier key for accepting predictions when there is } ``` +### Keybinding Example: Always Use Tab + +If you want to use `tab` to always accept edit predictions, you can use the following keybinding: + +```json [keymap] +{ + "context": "Editor && edit_prediction_conflict && showing_completions", + "bindings": { + "tab": "editor::AcceptEditPrediction" + } +} +``` + +This will make `tab` work to accept edit predictions _even when_ you're also seeing language server completions. +That means that you need to rely on `enter` for accepting the latter. + ### Keybinding Example: Always Use Alt-Tab -The keybinding example below causes `alt-tab` to always be used instead of sometimes using `tab`. You might want this in order to have just one keybinding to use for accepting edit predictions, since the behavior of `tab` varies based on context. 
+The keybinding example below causes `alt-tab` to always be used instead of sometimes using `tab`. +You might want this in order to have just one (alternative) keybinding to use for accepting edit predictions, since the behavior of `tab` varies based on context. ```json [keymap] { @@ -127,7 +158,7 @@ The keybinding example below causes `alt-tab` to always be used instead of somet }, ``` -If `"vim_mode": true` is set within `settings.json`, then additional bindings are needed after the above to return `tab` to its original behavior: +If you are using [Vim mode](../vim.md), then additional bindings are needed after the above to return `tab` to its original behavior: ```json [keymap] { @@ -146,7 +177,8 @@ If `"vim_mode": true` is set within `settings.json`, then additional bindings ar ### Keybinding Example: Displaying Tab and Alt-Tab on Linux -While `tab` and `alt-tab` are supported on Linux, `alt-l` is displayed instead. If your window manager does not reserve `alt-tab`, and you would prefer to use `tab` and `alt-tab`, include these bindings in `keymap.json`: +While `tab` and `alt-tab` are supported on Linux, `alt-l` is displayed instead. +If your window manager does not reserve `alt-tab`, and you would prefer to use `tab` and `alt-tab`, include these bindings in `keymap.json`: ```json [keymap] { diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index 59ad764483b74b9b7c1557082d953568b90b802f..0467913b072ef296a5b187fbeb8dc6a406bf1bed 100644 --- a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -33,7 +33,7 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your #### Installation -The first time you create a Gemini CLI thread, Zed will install [@google/gemini-cli](https://github.com/zed-industries/claude-code-acp). This installation is only available to Zed and is kept up to date as you use the agent. +The first time you create a Gemini CLI thread, Zed will install [@google/gemini-cli](https://github.com/google-gemini/gemini-cli). This installation is only available to Zed and is kept up to date as you use the agent. By default, Zed will use this managed version of Gemini CLI even if you have it installed globally. However, you can configure it to use a version in your `PATH` by adding this to your settings: @@ -168,6 +168,8 @@ To ensure you're using your billing method of choice, [open a new Codex thread]( If you are already logged in and want to change your authentication method, type `/logout` in the thread and authenticate again. +If you want to use a third-party provider with Codex, you can configure that with your [Codex config.toml](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) or pass extra [args/env variables](https://github.com/openai/codex/blob/main/docs/config.md#model-selection) to your Codex agent servers settings. + #### Installation The first time you create a Codex thread, Zed will install [codex-acp](https://github.com/zed-industries/codex-acp). This installation is only available to Zed and is kept up to date as you use the agent. 
@@ -194,6 +196,7 @@ You can also add agents through your `settings.json`, by specifying certain fiel { "agent_servers": { "My Custom Agent": { + "type": "custom", "command": "node", "args": ["~/projects/agent/index.js", "--acp"], "env": {} @@ -206,6 +209,31 @@ This can be useful if you're in the middle of developing a new agent that speaks It's also possible to specify a custom path, arguments, or environment for the builtin integrations by using the `claude` and `gemini` names. +### Custom Keybinding For Extension-Based Agents + +To assign a custom keybinding to start a new thread for agents that were added by installing agent server extensions, add the following snippet to your `keymap.json` file: + +```json [keymap] +{ + "bindings": { + "cmd-alt-n": [ // Your custom keybinding + "agent::NewExternalAgentThread", + { + "agent": { + "custom": { + "name": "My Agent", // The agent name as it appears in the UI (e.g., "OpenCode", "Auggie CLI", etc.) + "command": { + "command": "my-agent", // The agent name in lowercase with no spaces + "args": ["acp"] + } + } + } + } + ] + } +}, +``` + ## Debugging Agents When using external agents in Zed, you can access the debug view via with `dev: open acp logs` from the Command Palette. This lets you see the messages being sent and received between Zed and the agent. diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index b737be53eac87cd630303556c1d0f8fcd8d406a1..ee495b1ba7e67a6cc15359453fd7d3ae41b17233 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -89,12 +89,32 @@ To do this: #### Cross-Region Inference -The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) for all the models and region combinations that support it. +The Zed implementation of Amazon Bedrock uses [Cross-Region inference](https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference.html) to improve availability and throughput. With Cross-Region inference, you can distribute traffic across multiple AWS Regions, enabling higher throughput. -For example, if you use `Claude Sonnet 3.7 Thinking` from `us-east-1`, it may be processed across the US regions, namely: `us-east-1`, `us-east-2`, or `us-west-2`. -Cross-Region inference requests are kept within the AWS Regions that are part of the geography where the data originally resides. -For example, a request made within the US is kept within the AWS Regions in the US. +##### Regional vs Global Inference Profiles + +Bedrock supports two types of cross-region inference profiles: + +- **Regional profiles** (default): Route requests within a specific geography (US, EU, APAC). For example, `us-east-1` uses the `us.*` profile which routes across `us-east-1`, `us-east-2`, and `us-west-2`. +- **Global profiles**: Route requests across all commercial AWS Regions for maximum availability and performance. + +By default, Zed uses **regional profiles** which keep your data within the same geography. You can opt into global profiles by adding `"allow_global": true` to your Bedrock configuration: + +```json [settings] +{ + "language_models": { + "bedrock": { + "authentication_method": "named_profile", + "region": "your-aws-region", + "profile": "your-profile-name", + "allow_global": true + } + } +} +``` + +**Note:** Only select newer models support global inference profiles. 
See the [AWS Bedrock supported models documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system) for the current list of models that support global inference. If you encounter availability issues with a model in your region, enabling `allow_global` may resolve them. Although the data remains stored only in the source Region, your input prompts and output results might move outside of your source Region during cross-Region inference. All data will be transmitted encrypted across Amazon's secure network. @@ -327,6 +347,33 @@ Download and install Ollama from [ollama.com/download](https://ollama.com/downlo 3. In the Agent Panel, select one of the Ollama models using the model dropdown. +#### Ollama Autodiscovery + +Zed will automatically discover models that Ollama has pulled. You can turn this off by setting +the `auto_discover` field in the Ollama settings. If you do this, you should manually specify which +models are available. + +```json [settings] +{ + "language_models": { + "ollama": { + "api_url": "http://localhost:11434", + "auto_discover": false, + "available_models": [ + { + "name": "qwen2.5-coder", + "display_name": "qwen 2.5 coder", + "max_tokens": 32768, + "supports_tools": true, + "supports_thinking": true, + "supports_images": true + } + ] + } + } +} +``` + #### Ollama Context Length {#ollama-context} Zed has pre-configured maximum context lengths (`max_tokens`) to match the capabilities of common models. @@ -587,7 +634,7 @@ These routing controls let you fine‑tune cost, capability, and reliability tra ### Vercel v0 {#vercel-v0} -[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. +[Vercel v0](https://v0.app/docs/api/model) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. It supports text and image inputs and provides fast streaming responses. The v0 models are [OpenAI-compatible models](/#openai-api-compatible), but Vercel is listed as first-class provider in the panel's settings view. 
diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md index 8fa36675ec46ed6ae1830dd32196815c34ab587f..956477a1c2872d9371f770c3a767e5a77bead9fa 100644 --- a/docs/src/ai/mcp.md +++ b/docs/src/ai/mcp.md @@ -40,11 +40,14 @@ You can connect them by adding their commands directly to your `settings.json`, ```json [settings] { "context_servers": { - "your-mcp-server": { - "source": "custom", + "local-mcp-server": { "command": "some-command", "args": ["arg-1", "arg-2"], "env": {} + }, + "remote-mcp-server": { + "url": "custom", + "headers": { "Authorization": "Bearer " } } } } diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md index 5b379fc75435c14ac46587f7449c7a5c54becfcf..6033bf23fad372b15909ff09a43f2747f3e619c0 100644 --- a/docs/src/ai/models.md +++ b/docs/src/ai/models.md @@ -5,6 +5,10 @@ We’re working hard to expand the models supported by Zed’s subscription offe | Model | Provider | Token Type | Provider Price per 1M tokens | Zed Price per 1M tokens | | ---------------------- | --------- | ------------------- | ---------------------------- | ----------------------- | +| Claude Opus 4.5 | Anthropic | Input | $5.00 | $5.50 | +| | Anthropic | Output | $25.00 | $27.50 | +| | Anthropic | Input - Cache Write | $6.25 | $6.875 | +| | Anthropic | Input - Cache Read | $0.50 | $0.55 | | Claude Opus 4.1 | Anthropic | Input | $15.00 | $16.50 | | | Anthropic | Output | $75.00 | $82.50 | | | Anthropic | Input - Cache Write | $18.75 | $20.625 | @@ -34,6 +38,8 @@ We’re working hard to expand the models supported by Zed’s subscription offe | GPT-5 nano | OpenAI | Input | $0.05 | $0.055 | | | OpenAI | Output | $0.40 | $0.44 | | | OpenAI | Cached Input | $0.005 | $0.0055 | +| Gemini 3.0 Pro | Google | Input | $2.00 | $2.20 | +| | Google | Output | $12.00 | $13.20 | | Gemini 2.5 Pro | Google | Input | $1.25 | $1.375 | | | Google | Output | $10.00 | $11.00 | | Gemini 2.5 Flash | Google | Input | $0.30 | $0.33 | @@ -63,6 +69,7 @@ A context window is the maximum span of text and code an LLM can consider at onc | Model | Provider | Zed-Hosted Context Window | | ----------------- | --------- | ------------------------- | +| Claude Opus 4.5 | Anthropic | 200k | | Claude Opus 4.1 | Anthropic | 200k | | Claude Sonnet 4 | Anthropic | 200k | | Claude Sonnet 3.7 | Anthropic | 200k | @@ -72,6 +79,7 @@ A context window is the maximum span of text and code an LLM can consider at onc | GPT-5 nano | OpenAI | 400k | | Gemini 2.5 Pro | Google | 200k | | Gemini 2.5 Flash | Google | 200k | +| Gemini 3.0 Pro | Google | 200k | > We're planning on expanding supported context windows for hosted Sonnet 4 and Gemini 2.5 Pro/Flash in the near future. Stay tuned! diff --git a/docs/src/ai/plans-and-usage.md b/docs/src/ai/plans-and-usage.md index fc59a894aacd524a10e31b65ababd4f8d79e3b8e..63f72211aa70b19b820fb9b368d47a3b008b726d 100644 --- a/docs/src/ai/plans-and-usage.md +++ b/docs/src/ai/plans-and-usage.md @@ -12,11 +12,11 @@ Usage of Zed's hosted models is measured on a token basis, converted to dollars Zed Pro comes with $5 of monthly dollar credit. A trial of Zed Pro includes $20 of credit, usable for 14 days. Monthly included credit resets on your monthly billing date. -To view your current usage, you can visit your account at [zed.dev/account](https://zed.dev/account). Information from our metering and billing provider, Orb, is embedded on that page. +To view your current usage, you can visit your account at [dashboard.zed.dev/account](https://dashboard.zed.dev/account). 
Information from our metering and billing provider, Orb, is embedded on that page. ## Spend Limits {#usage-spend-limits} -At the top of [the Account page](https://zed.dev/account), you'll find an input for `Maximum Token Spend`. The dollar amount here specifies your _monthly_ limit for spend on tokens, _not counting_ the $5/month included with your Pro subscription. +At the top of [the Account page](https://dashboard.zed.dev/account), you'll find an input for `Maximum Token Spend`. The dollar amount here specifies your _monthly_ limit for spend on tokens, _not counting_ the $5/month included with your Pro subscription. The default value for all Pro users is $10, for a total monthly spend with Zed of $20 ($10 for your Pro subscription, $10 in incremental token spend). This can be set to $0 to limit your spend with Zed to exactly $10/month. If you adjust this limit _higher_ than $10 and consume more than $10 of incremental token spend, you'll be billed via [threshold billing](./billing.md#threshold-billing). diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md index 6921567b9165e863cd4303752a669e641e6fcdca..d72cc8c476a83f60d8342962fcdd410e541e7356 100644 --- a/docs/src/ai/privacy-and-security.md +++ b/docs/src/ai/privacy-and-security.md @@ -2,7 +2,7 @@ ## Philosophy -Zed aims to collect on the minimum data necessary to serve and improve our product. +Zed aims to collect only the minimum data necessary to serve and improve our product. We believe in opt-in data sharing as the default in building AI products, rather than opt-out, like most of our competitors. Privacy Mode is not a setting to be toggled, it's a default stance. @@ -12,6 +12,8 @@ It is entirely possible to use Zed, including Zed's AI capabilities, without sha ## Documentation +- [Worktree trust](../worktree-trust.md): How Zed opens files and directories in restricted mode. + - [Telemetry](../telemetry.md): How Zed collects general telemetry data. - [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions. diff --git a/docs/src/ai/text-threads.md b/docs/src/ai/text-threads.md index eb53051ee00006ab7866cf13bd942a45dc95da40..c82cda7265d94ca197bbdba9909ef41aa46f8651 100644 --- a/docs/src/ai/text-threads.md +++ b/docs/src/ai/text-threads.md @@ -235,14 +235,15 @@ See [Extension: Slash Commands](../extensions/slash-commands.md) to learn how to ## Text Threads vs. Threads -For a while, text threads were the only way to interact with AI in Zed. -We have since introduced, back in May 2025, a new take on the agent panel, which, as opposed to being editor-driven, optimizes for readability. -You can read more about it in [the Agent Panel page](./agent-panel.md). - -However, aside from many interaction differences, the major difference between one vs. the other is that tool calls don't work in Text Threads. -So, it's accurate to say that Text Threads aren't necessarily "agentic", as they can't perform any action on your behalf. -Think of it more like a regular and "traditional" AI chat, where what you'll get out of the model is simply just text. -Consequently, [external agents](./external-agents.md) are also not available in Text Threads. +For some time, text threads were the only way to interact with AI in Zed. +In May 2025, we introduced a new version of the agent panel, which, as opposed to being editor-based, is optimized for readability. +Visit [the Agent Panel page](./agent-panel.md) to learn more about it. 
+ +More importantly, aside from the many UI differences, the major aspect that sets one apart from the other is that tool calls don't work in Text Threads. +Due to that, it's accurate to say that Text Threads aren't conceptually agentic, as they can't perform any action on your behalf (or any action at all). + +Think of it more like a regular/"traditional" AI chat, where the only thing you can get from the model is simply just text. +Consequently, [MCP servers](./mcp.md) and [external agents](./external-agents.md) are also not available in Text Threads. ## Advanced Concepts diff --git a/docs/src/all-actions.md b/docs/src/all-actions.md index d20f7cfd63c01c03937c85e8f46476711c80e30f..e5a45a8fd8d5926e657d6ad35234fd14d4853952 100644 --- a/docs/src/all-actions.md +++ b/docs/src/all-actions.md @@ -1,3 +1,3 @@ -## All Actions +# All Actions {#ACTIONS_TABLE#} diff --git a/docs/src/authentication.md b/docs/src/authentication.md index 6d05567e3198ed5180b65dc0fb5f470baa679f9e..0ea97040a0ae2023143beb5a83d15cd9e28c9786 100644 --- a/docs/src/authentication.md +++ b/docs/src/authentication.md @@ -4,7 +4,7 @@ Signing in to Zed is not required. You can use most features you'd expect in a c ## What Features Require Signing In? -1. All real-time [collaboration features](./collaboration.md). +1. All real-time [collaboration features](./collaboration/overview.md). 2. [LLM-powered features](./ai/overview.md), if you are using Zed as the provider of your LLM models. To use AI without signing in, you can [bring and configure your own API keys](./ai/llm-providers.md#use-your-own-keys). ## Signing In diff --git a/docs/src/channels.md b/docs/src/channels.md deleted file mode 100644 index afd97cdabc51f8c54ffd3f85d02c7aa0764d2f8b..0000000000000000000000000000000000000000 --- a/docs/src/channels.md +++ /dev/null @@ -1,52 +0,0 @@ -# Channels - -At Zed we believe that great things are built by great people working together. We have designed Zed to help every individual work faster and to help teams of people work together more effectively. - -## Overview - -Channels provide a way to streamline collaborating for software engineers in many ways, but particularly: - -- Pairing – when working on something together, you both have your own screen, mouse, and keyboard. -- Mentoring – it’s easy to jump in to someone else’s context, and help them get unstuck, without the friction of pushing code up. -- Refactoring – you can have multiple people join in on large refactoring without fear of conflict. -- Ambient awareness – you can see what everyone else is working on with no need for status emails or meetings. - -## Channels - -To open the collaboration panel hit {#kb collab_panel::ToggleFocus} or `collab panel: toggle focus`. - -Each channel corresponds to an ongoing project or work-stream. You can see who’s in a channel as their avatars will show up in the sidebar. This makes it easy to see what everyone is doing and where to find them if needed. - -You can create as many channels as you need. As in the example above, you can mix channels for your day job, as well as side-projects in one instance of Zed. - -Joining a channel adds you to a shared room where you can work on projects together. - -## Sharing projects - -After joining a channel, you can `Share` a project with the other people there. This will enable them to edit the code hosted on your machine as though they had it checked out locally. 
- -When you are editing someone else’s project, you still have the full power of the editor at your fingertips, you can jump to definitions, use the AI assistant, and see any diagnostic errors. This is extremely powerful for pairing, as one of you can be implementing the current method while the other is reading and researching the correct solution to the next problem. And, because you have your own config running, it feels like you’re using your own machine. - -See [our collaboration documentation](./collaboration.md) for more details about how this works. - -## Notes - -Each channel has a notes file associated with it to keep track of current status, new ideas, or to collaborate on building out the design for the feature that you’re working on before diving into code. - -This is similar to a Google Doc, except powered by Zed's collaborative software and persisted to our servers. - -## Inviting people - -By default, channels you create can only be accessed by you. You can invite collaborators by right clicking and selecting `Manage members`. - -When you have channels nested under each other, permissions are inherited. For instance, in the example above, we only need to add people to the `#zed` channel, and they will automatically gain access to `#core-editor`, `#new-languages`, and `#stability`. - -Once you have added someone, they can either join your channel by clicking on it in their Zed sidebar, or you can share the link to the channel so that they can join directly. - -## Livestreaming & Guests - -A Channel can also be made Public. This allows anyone to join the channel by clicking on the link. - -Guest users in channels can hear and see everything that is happening, and have read only access to projects and channel notes. - -If you'd like to invite a guest to participate in a channel for the duration of a call you can do so by right clicking on them in the Collaboration Panel. "Allowing Write Access" will allow them to edit any projects shared into the call, and to use their microphone and share their screen if they wish. diff --git a/docs/src/collaboration.md b/docs/src/collaboration.md deleted file mode 100644 index 8992c7d6ca0185e08ac3923359b0dee9a2fadbfe..0000000000000000000000000000000000000000 --- a/docs/src/collaboration.md +++ /dev/null @@ -1,103 +0,0 @@ -# Collaboration - -Only collaborate with people that you trust. Since sharing a project gives them access to your local file system, you should not share projects with people you do not trust; they could potentially do some nasty things. - -In the future, we will do more to prevent this type of access beyond the shared project and add more control over what collaborators can do, but for now, only collaborate with people you trust. - -## Adding a collaborator to a call - -Before you can collaborate, you'll need to add a collaborator to your contacts. To do this: - -1. Open the contacts menu by clicking on the `Show contacts menu` button in the upper right-hand corner of the window or by running `collab: toggle contacts menu` (`cmd-shift-c`). -2. Click the add button to the right of the search box. -3. Search for the contact you want to add using their GitHub handle. Note: the person you are trying to add as a contact must be an existing Zed user. 
- -### Inviting a collaborator - -You can add an existing Zed user as a contact from the contacts menu, deployed from the `Show contacts menu` button in the upper right-hand corner of the window or by `collab: toggle contacts menu` (`cmd-shift-c`) and then clicking the `Search for new contact` button to the right of the search box. - -![Inviting a collaborator to the current project](https://zed.dev/img/collaboration/add-a-collaborator.png) - -When you invite a collaborator to a project not in a call they will receive a notification to join, and a new call is created. - -![Receiving an invite to join a call](https://zed.dev/img/collaboration/receiving-an-invite.jpg) - -### Inviting non-Zed users - -If someone you want to collaborate with has not yet signed up for Zed, they will need to [download the app](https://zed.dev/download) and sign in for the first time before you can add them. Identity is tied to GitHub accounts, so new users will need to authenticate with GitHub in order to sign into Zed. - -### Voice chat - -When joining a call, Zed will automatically share your microphone with other users in the call, if your OS allows it. This isn't tied to your project. You can disable this for your client via the [`mute_on_join`](./configuring-zed.md#calls) setting. - -## Collaborating on a project - -### Share a project - -When you invite a collaborator to join your project, a new call begins. Your Zed windows will show the call participants in the title bar of the window. - -![A new Zed call with two collaborators](https://zed.dev/img/collaboration/new-call.png) - -Collaborators in the same project as you are in color, and have a cursor color. Collaborators in other projects are shown in gray. Collaborators that have access to the current project will have their own cursor color under their avatar. - -We aim to eliminate the distinction between local and remote projects as much as possible. Collaborators can open, edit, and save files, perform searches, interact with the language server, etc. Guests have a read-only view of the project, including access to language server info. - -#### Unshared Projects - -If a collaborator is currently in a project that is not shared, you will not be able to jump to their project or follow them until they either share the project or return to a project that is shared. - -If you are in a project that isn't shared, others will not be able to join it or see its contents. - -### Follow a collaborator - -To follow a collaborator, click on their avatar in the top right of the window. You can also cycle through collaborators using `workspace: follow next collaborator` (`ctrl-alt-cmd-f`). - -When you join a project, you'll immediately start following the collaborator that invited you. - -![Automatically following the person inviting us to a project](https://zed.dev/img/collaboration/joining-a-call.png) - -When you are in a pane that is following a collaborator, you will: - -- follow their cursor and scroll position -- follow them to other files in the same project -- instantly swap to viewing their screen in that pane, if they are sharing their screen and leave the project - -If you move your cursor or make an edit in that pane, you will stop following. - -To start following again, you can click on a collaborator's avatar or cycle through following different participants by pressing `workspace: follow next collaborator` (`ctrl-alt-cmd-f`). - -#### How following works - -Following is confined to a particular pane. 
When a pane is following a collaborator, it is outlined in their cursor color. - -This pane-specific behavior allows you to follow someone in one pane while navigating independently in another and can be an effective layout for some collaboration styles. - -### Sharing your screen - -Share your screen with collaborators in the current call by clicking on the `Share screen` button in the top right of the window. - -Collaborators will see your screen if they are following you and you start viewing a window outside Zed or a project that is not shared. - -Collaborators can see your entire screen when you are screen sharing, so be careful not to share anything you don't want to share. Remember to stop screen sharing when you are finished. - -Call participants can open a dedicated tab for your screen share by opening the contacts menu in the top right and clicking on the `Screen` entry if you are sharing your screen. - -### Adding a project - -You can add a project to a call by clicking on the `Share` button next to the project name in the title bar. - -### Removing a project - -You can remove a project from a call by clicking on the `Unshare` button next to the project name in the title bar. - -Collaborators that are currently in that project will be disconnected from the project and will not be able to rejoin it unless you share it again. - -### Following a collaborator's terminal - -You can follow what a collaborator is doing in their terminal by having them share their screen and following it. - -In the future, we plan to allow you to collaborate in the terminal directly in a shared project. - -### Leave call - -You can leave a call by opening the contacts menu in the top right and clicking on the `Leave call` button. diff --git a/docs/src/collaboration/channels.md b/docs/src/collaboration/channels.md new file mode 100644 index 0000000000000000000000000000000000000000..ebc2760275c7e3382dfabeac296dfede1b58d268 --- /dev/null +++ b/docs/src/collaboration/channels.md @@ -0,0 +1,122 @@ +# Channels + +Channels provide a way to streamline collaborating for software engineers in many ways, but particularly: + +- Pairing – when working on something together, you both have your own screen, mouse, and keyboard. +- Mentoring – it's easy to jump in to someone else's context, and help them get unstuck, without the friction of pushing code up. +- Refactoring – you can have multiple people join in on large refactoring without fear of conflict. +- Ambient awareness – you can see what everyone else is working on with no need for status emails or meetings. + +Each channel corresponds to an ongoing project or work-stream. +You can see who's in a channel as their avatars will show up in the sidebar. +This makes it easy to see what everyone is doing and where to find them if needed. + +Create a channel by clicking the `+` icon next to the `Channels` text in the collab panel. +Create a subchannel by right clicking an existing channel and selecting `New Subchannel`. + +You can mix channels for your day job, as well as side-projects in your collab panel. + +Joining a channel adds you to a shared room where you can work on projects together. + +_Join [our channel tree](https://zed.dev/channel/zed-283) to get an idea of how you can organize yours._ + +## Inviting People + +By default, channels you create can only be accessed by you. +You can invite collaborators by right clicking and selecting `Manage members`. + +When you have subchannels nested under others, permissions are inherited. 
+For instance, adding people to the top-level channel in your channel tree will automatically give them access to its subchannels. + +Once you have added someone, they can either join your channel by clicking on it in their Zed sidebar, or you can share the link to the channel so that they can join directly. + +## Voice Chat + +You can mute/unmute your microphone via the microphone icon in the upper right-hand side of the window. + +> Note: When joining a channel, Zed will automatically share your microphone with other users in the call, if your OS allows it. +> If you'd prefer your microphone to be off when joining a channel, you can do so via the [`mute_on_join`](../configuring-zed.md#calls) setting. + +## Sharing Projects + +After joining a channel, you can share a project over the channel via the `Share` button in the upper right-hand side of the window. +This will allow channel members to edit the code hosted on your machine as though they had it checked out locally. + +When you are editing someone else's project, you still have the full power of the editor at your fingertips; you can jump to definitions, use the AI assistant, and see any diagnostic errors. +This is extremely powerful for pairing, as one of you can be implementing the current method while the other is reading and researching the correct solution to the next problem. +And, because you have your own config running, it feels like you're using your own machine. + +We aim to eliminate the distinction between local and remote projects as much as possible. +Collaborators can open, edit, and save files, perform searches, interact with the language server, etc. +Guests have a read-only view of the project, including access to language server info. + +### Unsharing a Project + +You can remove a project from a channel by clicking on the `Unshare` button in the title bar. + +Collaborators that are currently in that project will be disconnected from the project and will not be able to rejoin it unless you share it again. + +## Channel Notes + +Each channel has a Markdown notes file associated with it to keep track of current status, new ideas, or to collaborate on building out the design for the feature that you're working on before diving into code. + +This is similar to a Google Doc, except powered by Zed's collaborative software and persisted to our servers. + +Open the channel notes by clicking on the document icon to the right of the channel name in the collaboration panel. + +> Note: You can view a channel's notes without joining the channel, if you'd just like to read up on what has been written. + +## Following Collaborators + +To follow a collaborator, click on their avatar in the top left of the title bar. +You can also cycle through collaborators using {#kb workspace::FollowNextCollaborator} or `workspace: follow next collaborator` in the command palette. + +When you join a project, you'll immediately start following the collaborator that invited you. + +When you are in a pane that is following a collaborator, you will: + +- follow their cursor and scroll position +- follow them to other files in the same project +- instantly swap to viewing their screenshare in that pane, if they are sharing their screen and leave the project + +To stop following, simply move your mouse or make an edit via your keyboard. + +### How Following Works + +Following is confined to a particular pane. +When a pane is following a collaborator, it is outlined in their cursor color. 
+ +Avatars of collaborators in the same project as you are in color, and have a cursor color. +Collaborators in other projects are shown in gray. + +This pane-specific behavior allows you to follow someone in one pane while navigating independently in another and can be an effective layout for some collaboration styles. + +### Following a Terminal + +Following is not currently supported in the terminal in the way it is supported in the editor. +As a workaround, collaborators can share their screen and you can follow that instead. + +## Screen Sharing + +Share your screen with collaborators in the current channel by clicking on the `Share screen` (monitor icon) button in the top right of the title bar. +If you have multiple displays, you can choose which one to share via the chevron to the right of the monitor icon. + +After you've shared your screen, others can click on the `Screen` entry under your name in the collaboration panel to open a tab that always keeps it visible. +If they are following you, Zed will automatically switch between following your cursor in their Zed instance and your screen share, depending on whether you are focused on Zed or another application, like a web browser. + +> Note: Collaborators can see your entire screen when you are screen sharing, so be careful not to share anything you don't want to share. +> Remember to stop screen sharing when you are finished. + +## Livestreaming & Guests + +A Channel can also be made Public. +This allows anyone to join the channel by clicking on the link. + +Guest users in channels can hear and see everything that is happening, and have read only access to projects and channel notes. + +If you'd like to invite a guest to participate in a channel for the duration of a call you can do so by right clicking on them in the Collaboration Panel. +"Allowing Write Access" will allow them to edit any projects shared into the call, and to use their microphone and share their screen if they wish. + +## Leaving a Call + +You can leave a channel by clicking on the `Leave call` button in the upper right-hand side of the window. diff --git a/docs/src/collaboration/contacts-and-private-calls.md b/docs/src/collaboration/contacts-and-private-calls.md new file mode 100644 index 0000000000000000000000000000000000000000..f011fa2c672c2e6e563e65172705115802262a7e --- /dev/null +++ b/docs/src/collaboration/contacts-and-private-calls.md @@ -0,0 +1,25 @@ +# Contacts and Private Calls + +Zed allows you to have private calls / collaboration sessions with those in your contacts. +These calls can be one-on-ones or contain any number of users from your contacts. + +## Adding a Contact + +1. In the collaboration panel, click the `+` button next to the `Contacts` section +1. Search for the contact using their GitHub handle.\ + _Note: Your contact must be an existing Zed user who has completed the GitHub authentication sign-in flow._ +1. Your contact will receive a notification. + Once they accept, you'll both appear in each other's contact list. + +## Private Calls + +To start up a private call... + +1. Click the `...` menu next to an online contact's name in the collaboration panel. +1. Click `Call ` + +Once you've begun a private call, you can add other online contacts by clicking on their name in the collaboration panel. 
+ +--- + +_Aside from a few additional features (channel notes, etc.), collaboration in private calls is largely the same as it is in [channels](./channels.md)._ diff --git a/docs/src/collaboration/overview.md b/docs/src/collaboration/overview.md new file mode 100644 index 0000000000000000000000000000000000000000..719aa56ee3b62c8562cd03ff8dd29faf25f2df5b --- /dev/null +++ b/docs/src/collaboration/overview.md @@ -0,0 +1,24 @@ +# Collaboration + +At Zed, we believe that great things are built by great people working together. +We have designed Zed to help individuals work faster and help teams of people work together more effectively. + +In Zed, all collaboration happens in the collaboration panel, which can be opened via {#kb collab_panel::ToggleFocus} or `collab panel: toggle focus` from the command palette. + +You will need to [sign in](../authentication.md#signing-in) in order to access features within the collaboration panel. + +## Collaboration panel + +The collaboration panel is broken down into two sections: + +1. [Channels](./channels.md): Ongoing project rooms where team members can share projects, collaborate on code, and maintain ambient awareness of what everyone is working on. +1. [Contacts and Private Calls](./contacts-and-private-calls.md): Your contacts list for ad-hoc private collaboration. + +--- + +> Note: Only collaborate with people that you trust. +> Since sharing a project gives them access to your local file system, you should not share projects with people you do not trust; they could potentially do some nasty things. +> +> In the future, we will do more to prevent this type of access beyond the shared project and add more control over what collaborators can do, but for now, only collaborate with people you trust. + +See our [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for collaboration. diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index e04d63f5d16a83c84b933d9f59db901c276b7a6d..9185b679063a7e6c4d5026a6df448f8ac24f0a8b 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -1,4 +1,4 @@ -# Configuring supported languages +# Configuring Supported Languages Zed offers powerful customization options for each programming language it supports. This guide will walk you through the various ways you can tailor your coding experience to your preferences and project requirements. @@ -58,6 +58,7 @@ You can customize a wide range of settings for each language, including: - [`soft_wrap`](./configuring-zed.md#soft-wrap): How to wrap long lines of code - [`show_completions_on_input`](./configuring-zed.md#show-completions-on-input): Whether or not to show completions as you type - [`show_completion_documentation`](./configuring-zed.md#show-completion-documentation): Whether to display inline and alongside documentation for items in the completions menu +- [`colorize_brackets`](./configuring-zed.md#colorize-brackets): Whether to use tree-sitter bracket queries to detect and colorize the brackets in the editor (also known as "rainbow brackets") These settings allow you to maintain specific coding styles across different languages and projects. 
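For instance, a minimal per-language sketch (the language name and values below are only placeholders) combining a few of these options under the `languages` key could look like:

```json [settings]
{
  "languages": {
    "Rust": {
      "tab_size": 4,
      "preferred_line_length": 100,
      "soft_wrap": "preferred_line_length",
      "colorize_brackets": true
    }
  }
}
```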
@@ -122,7 +123,7 @@ You can specify your preference using the `language_servers` setting: ```json [settings] "languages": { "PHP": { - "language_servers": ["intelephense", "!phpactor", "..."] + "language_servers": ["intelephense", "!phpactor", "!phptools", "..."] } } ``` diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 145620c3962984407db73bf7ac4c0a3bbfa75324..8a638d9f7857e1a55aaa5589a77110a7b803bbfe 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -213,7 +213,7 @@ Note: This setting has no effect in Vim mode, as rewrap is already allowed every ## Auto Install extensions - Description: Define extensions to be autoinstalled or never be installed. -- Setting: `auto_install_extension` +- Setting: `auto_install_extensions` - Default: `{ "html": true }` **Options** @@ -584,10 +584,11 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed **Options** -There are two options to choose from: +There are three options to choose from: 1. `shell_hook`: Use the shell hook to load direnv. This relies on direnv to activate upon entering the directory. Supports POSIX shells and fish. 2. `direct`: Use `direnv export json` to load direnv. This will load direnv directly without relying on the shell hook and might cause some inconsistencies. This allows direnv to work with any shell. +3. `disabled`: No shell environment will be loaded automatically; direnv must be invoked manually (e.g. with `direnv exec`) to be used. ## Double Click In Multibuffer @@ -1450,6 +1451,47 @@ or `boolean` values +### Session + +- Description: Controls Zed lifecycle-related behavior. +- Setting: `session` +- Default: + +```json +{ + "session": { + "restore_unsaved_buffers": true, + "trust_all_worktrees": false + } +} +``` + +**Options** + +1. Whether or not to restore unsaved buffers on restart: + +```json [settings] +{ + "session": { + "restore_unsaved_buffers": true + } +} +``` + +If this is true, user won't be prompted whether to save/discard dirty files when closing the application. + +2. Whether or not to skip worktree and workspace trust checks: + +```json [settings] +{ + "session": { + "trust_all_worktrees": false + } +} +``` + +When trusted, project settings are synchronized automatically, language and MCP servers are downloaded and started automatically. + ### Drag And Drop Selection - Description: Whether to allow drag and drop text selection in buffer. `delay` is the milliseconds that must elapse before drag and drop is allowed. Otherwise, a new text selection is created. @@ -2519,11 +2561,12 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif "path": "~", "hour_format": "hour12" } + ``` ### Path -- Description: The path of the directory where journal entries are stored. +- Description: The path of the directory where journal entries are stored. If an invalid path is specified, the journal will fall back to using `~` (the home directory). - Setting: `path` - Default: `~` @@ -2859,11 +2902,25 @@ Configuration object for defining settings profiles. 
Example: ```json [settings] "preview_tabs": { "enabled": true, + "enable_preview_from_project_panel": true, "enable_preview_from_file_finder": false, - "enable_preview_from_code_navigation": false, + "enable_preview_from_multibuffer": true, + "enable_preview_multibuffer_from_code_navigation": false, + "enable_preview_file_from_code_navigation": true, + "enable_keep_preview_on_code_navigation": false, } ``` +### Enable preview from project panel + +- Description: Determines whether to open files in preview mode when opened from the project panel with a single click. +- Setting: `enable_preview_from_project_panel` +- Default: `true` + +**Options** + +`boolean` values + ### Enable preview from file finder - Description: Determines whether to open files in preview mode when selected from the file finder. @@ -2874,10 +2931,40 @@ Configuration object for defining settings profiles. Example: `boolean` values -### Enable preview from code navigation +### Enable preview from multibuffer + +- Description: Determines whether to open files in preview mode when opened from a multibuffer. +- Setting: `enable_preview_from_multibuffer` +- Default: `true` + +**Options** + +`boolean` values + +### Enable preview multibuffer from code navigation + +- Description: Determines whether to open tabs in preview mode when code navigation is used to open a multibuffer. +- Setting: `enable_preview_multibuffer_from_code_navigation` +- Default: `false` + +**Options** + +`boolean` values + +### Enable preview file from code navigation + +- Description: Determines whether to open tabs in preview mode when code navigation is used to open a single file. +- Setting: `enable_preview_file_from_code_navigation` +- Default: `true` + +**Options** + +`boolean` values + +### Enable keep preview on code navigation -- Description: Determines whether a preview tab gets replaced when code navigation is used to navigate away from the tab. -- Setting: `enable_preview_from_code_navigation` +- Description: Determines whether to keep tabs in preview mode when code navigation is used to navigate away from them. If `enable_preview_file_from_code_navigation` or `enable_preview_multibuffer_from_code_navigation` is also true, the new tab may replace the existing one. +- Setting: `enable_keep_preview_on_code_navigation` - Default: `false` **Options** @@ -3096,7 +3183,15 @@ List of strings containing any combination of: ```json [settings] { - "restore_on_startup": "none" + "restore_on_startup": "empty_tab" +} +``` + +4. Always start with the welcome launchpad: + +```json [settings] +{ + "restore_on_startup": "launchpad" } ``` @@ -3585,6 +3680,7 @@ List of `integer` column numbers "option_as_meta": false, "button": true, "shell": "system", + "scroll_multiplier": 3.0, "toolbar": { "breadcrumbs": false }, @@ -3997,6 +4093,26 @@ Disable with: } ``` +### Terminal: Scroll Multiplier + +- Description: The multiplier for scrolling speed in the terminal when using mouse wheel or trackpad. +- Setting: `scroll_multiplier` +- Default: `1.0` + +**Options** + +Positive floating point values. Values less than or equal to 0 will be clamped to a minimum of 0.01. + +**Example** + +```json +{ + "terminal": { + "scroll_multiplier": 5.0 + } +} +``` + ## Terminal: Toolbar - Description: Whether or not to show various elements in the terminal toolbar. @@ -4049,7 +4165,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` **Options** -1. Use the current file's project directory. Will Fallback to the first project directory strategy if unsuccessful +1. 
Use the current file's project directory. Fallback to the first project directory strategy if unsuccessful. ```json [settings] { @@ -4059,7 +4175,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` } ``` -2. Use the first project in this workspace's directory. Will fallback to using this platform's home directory. +2. Use the first project in this workspace's directory. Fallback to using this platform's home directory. ```json [settings] { @@ -4069,7 +4185,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` } ``` -3. Always use this platform's home directory (if we can find it) +3. Always use this platform's home directory if it can be found. ```json [settings] { @@ -4093,6 +4209,53 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` } ``` +### Terminal: Path Hyperlink Regexes + +- Description: Regexes used to identify path hyperlinks. The regexes can be specified in two forms - a single regex string, or an array of strings (which will be collected into a single multi-line regex string). +- Setting: `path_hyperlink_regexes` +- Default: + +```json [settings] +{ + "terminal": { + "path_hyperlink_regexes": [ + // Python-style diagnostics + "File \"(?[^\"]+)\", line (?[0-9]+)", + // Common path syntax with optional line, column, description, trailing punctuation, or + // surrounding symbols or quotes + [ + "(?x)", + "# optionally starts with 0-2 opening prefix symbols", + "[({\\[<]{0,2}", + "# which may be followed by an opening quote", + "(?[\"'`])?", + "# `path` is the shortest sequence of any non-space character", + "(?(?[^ ]+?", + " # which may end with a line and optionally a column,", + " (?:+[0-9]+(:[0-9]+)?|:?\\([0-9]+([,:][0-9]+)?\\))?", + "))", + "# which must be followed by a matching quote", + "(?()\\k)", + "# and optionally a single closing symbol", + "[)}\\]>]?", + "# if line/column matched, may be followed by a description", + "(?():[^ 0-9][^ ]*)?", + "# which may be followed by trailing punctuation", + "[.,:)}\\]>]*", + "# and always includes trailing whitespace or end of line", + "([ ]+|$)" + ] + ] + } +} +``` + +### Terminal: Path Hyperlink Timeout (ms) + +- Description: Maximum time to search for a path hyperlink. When set to 0, path hyperlinks are disabled. +- Setting: `path_hyperlink_timeout_ms` +- Default: `1` + ## REPL - Description: Repl settings. @@ -4195,6 +4358,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a "show_project_items": true, "show_onboarding_banner": true, "show_user_picture": true, + "show_user_menu": true, "show_sign_in": true, "show_menus": false } @@ -4207,6 +4371,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - `show_project_items`: Whether to show the project host and name in the titlebar - `show_onboarding_banner`: Whether to show onboarding banners in the titlebar - `show_user_picture`: Whether to show user picture in the titlebar +- `show_user_menu`: Whether to show the user menu button in the titlebar (the one that displays your avatar by default and contains options like Settings, Keymap, Themes, etc.) 
- `show_sign_in`: Whether to show the sign in button in the titlebar - `show_menus`: Whether to show the menus in the titlebar @@ -4276,10 +4441,15 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a "indent_guides": { "show": "always" }, + "sort_mode": "directories_first", "hide_root": false, "hide_hidden": false, "starts_open": true, - "open_file_on_paste": true + "auto_open": { + "on_create": true, + "on_paste": true, + "on_drop": true + } } } ``` @@ -4488,6 +4658,58 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a } ``` +### Sort Mode + +- Description: Sort order for entries in the project panel +- Setting: `sort_mode` +- Default: `directories_first` + +**Options** + +1. Show directories first, then files + +```json [settings] +{ + "sort_mode": "directories_first" +} +``` + +2. Mix directories and files together + +```json [settings] +{ + "sort_mode": "mixed" +} +``` + +3. Show files first, then directories + +```json [settings] +{ + "sort_mode": "files_first" +} +``` + +### Auto Open + +- Description: Control whether files are opened automatically after different creation flows in the project panel. +- Setting: `auto_open` +- Default: + +```json [settings] +"auto_open": { + "on_create": true, + "on_paste": true, + "on_drop": true +} +``` + +**Options** + +- `on_create`: Whether to automatically open newly created files in the editor. +- `on_paste`: Whether to automatically open files after pasting or duplicating them. +- `on_drop`: Whether to automatically open files dropped from external sources. + ## Agent Visit [the Configuration page](./ai/configuration.md) under the AI section to learn more about all the agent-related settings. @@ -4567,6 +4789,34 @@ See the [debugger page](./debugger.md) for more information about debugging supp - `collapse_untracked_diff`: Whether to collapse untracked files in the diff panel - `scrollbar`: When to show the scrollbar in the git panel +## Git Hosting Providers + +- Description: Register self-hosted GitHub, GitLab, or Bitbucket instances so commit hashes, issue references, and permalinks resolve to the right host. +- Setting: `git_hosting_providers` +- Default: `[]` + +**Options** + +Each entry accepts: + +- `provider`: One of `github`, `gitlab`, or `bitbucket` +- `name`: Display name for the instance +- `base_url`: Base URL, e.g. `https://git.example.corp` + +You can define these in user or project settings; project settings are merged on top of user settings. + +```json [settings] +{ + "git_hosting_providers": [ + { + "provider": "github", + "name": "BigCorp GitHub", + "base_url": "https://git.example.corp" + } + ] +} +``` + ## Outline Panel - Description: Customize outline Panel @@ -4608,6 +4858,18 @@ See the [debugger page](./debugger.md) for more information about debugging supp }, ``` +## Colorize Brackets + +- Description: Whether to use tree-sitter bracket queries to detect and colorize the brackets in the editor (also known as "rainbow brackets"). +- Setting: `colorize_brackets` +- Default: `false` + +**Options** + +`boolean` values + +The colors that are used for different indentation levels are defined in the theme (theme key: `accents`). They can be customized by using theme overrides. + ## Unnecessary Code Fade - Description: How much to fade out unused code. 
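As a rough illustration of the Colorize Brackets note above, here is a sketch only: it assumes the theme's `accents` key accepts a list of colors and that it can be set through `experimental.theme_overrides`, and the hex values are placeholders.

```json [settings]
{
  "colorize_brackets": true,
  "experimental.theme_overrides": {
    "accents": ["#ff7eb6", "#82cfff", "#3ddbd9", "#42be65"]
  }
}
```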
diff --git a/docs/src/dev-containers.md b/docs/src/dev-containers.md new file mode 100644 index 0000000000000000000000000000000000000000..c87b204ee9cded48edb95752dd234fa55df71338 --- /dev/null +++ b/docs/src/dev-containers.md @@ -0,0 +1,50 @@ +# Dev Containers + +Dev Containers provide a consistent, reproducible development environment by defining your project's dependencies, tools, and settings in a container configuration. + +If your repository includes a `.devcontainer/devcontainer.json` file, Zed can open a project inside a development container. + +## Requirements + +- Docker must be installed and available in your `PATH`. Zed requires the `docker` command to be present. If you use Podman, you can alias it to `docker` (e.g., `alias docker=podman`). +- Your project must contain a `.devcontainer/devcontainer.json` directory/file. + +## Using Dev Containers in Zed + +### Automatic prompt + +When you open a project that contains the `.devcontainer/devcontainer.json` directory/file, Zed will display a prompt asking whether to open the project inside the dev container. Choosing "Open in Container" will: + +1. Build the dev container image (if needed). +2. Launch the container. +3. Reopen the project connected to the container environment. + +### Manual open + +If you dismiss the prompt or want to reopen the project inside a container later, you can use Zed's command palette to run the "Project: Open Remote" command and select the option to open the project in a dev container. +Alternatively, you can reach for the Remote Projects modal (through the {#kb projects::OpenRemote} binding) and choose the "Connect Dev Container" option. + +## Editing the dev container configuration + +If you modify `.devcontainer/devcontainer.json`, Zed does not currently rebuild or reload the container automatically. After changing configuration: + +- Stop or kill the existing container manually (e.g., via `docker kill `). +- Reopen the project in the container. + +## Working in a Dev Container + +Once connected, Zed operates inside the container environment for tasks, terminals, and language servers. +Files are linked from your workspace into the container according to the dev container specification. + +## Known Limitations + +> **Note:** This feature is still in development. + +- **Extensions:** Zed does not yet manage extensions separately for container environments. The host's extensions are used as-is. +- **Port forwarding:** Only the `appPort` field is supported. `forwardPorts` and other advanced port-forwarding features are not implemented. +- **Configuration changes:** Updates to `devcontainer.json` do not trigger automatic rebuilds or reloads; containers must be manually restarted. + +## See also + +- [Remote Development](./remote-development.md) for connecting to remote servers over SSH. +- [Tasks](./tasks.md) for running commands in the integrated terminal. diff --git a/docs/src/development/debuggers.md b/docs/src/development/debuggers.md index a5713f6c8aae1123e48ab6ab9f85f2147dfc7819..11f49390d41b89cfb1f527e1adabfd8b1b6d401a 100644 --- a/docs/src/development/debuggers.md +++ b/docs/src/development/debuggers.md @@ -5,7 +5,7 @@ ## Using Zed's built-in debugger -While the Zed project is open you can open the `New Process Modal` and select the `Debug` tab. There you can see to debug configurations to debug Zed with, one for GDB and one for LLDB. Select the configuration you want and Zed will build and launch the binary. 
+While the Zed project is open you can open the `New Process Modal` and select the `Debug` tab. There you can see two debug configurations to debug Zed with, one for GDB and one for LLDB. Select the configuration you want and Zed will build and launch the binary. Please note, GDB isn't supported on arm Macbooks diff --git a/docs/src/development/release-notes.md b/docs/src/development/release-notes.md index 5005fc32d36bafb57754e45423b45fc8b7bf64d9..90e1ad21b102de291f65894748f0abf11519a59f 100644 --- a/docs/src/development/release-notes.md +++ b/docs/src/development/release-notes.md @@ -10,7 +10,7 @@ Release Notes: - N/A _or_ Added/Fixed/Improved ... ``` -On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/releases) docs. +On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/release-notes) docs. The script outputs everything below the `Release Notes` line, including additional data such as the pull request author (if not a Zed team member) and a link to the pull request. If you use `N/A`, the script skips your pull request entirely. diff --git a/docs/src/extensions/agent-servers.md b/docs/src/extensions/agent-servers.md index ce6204e33ee0afd91d705cd90fe4134b9652f8be..c8367a8418d07f827258403587a9787779f55cb9 100644 --- a/docs/src/extensions/agent-servers.md +++ b/docs/src/extensions/agent-servers.md @@ -46,15 +46,25 @@ Each target must specify: - `archive`: URL to download the archive from (supports `.tar.gz`, `.zip`, etc.) - `cmd`: Command to run the agent server (relative to the extracted archive) - `args`: Command-line arguments to pass to the agent server (optional) +- `sha256`: SHA-256 hash string of the archive's bytes (optional, but recommended for security) +- `env`: Environment variables specific to this target (optional, overrides agent-level env vars with the same name) ### Optional Fields -You can also optionally specify: +You can also optionally specify at the agent server level: -- `sha256`: SHA-256 hash string of the archive's bytes. Zed will check this after the archive is downloaded and give an error if it doesn't match, so doing this improves security. -- `env`: Environment variables to set in the agent's spawned process. +- `env`: Environment variables to set in the agent's spawned process. These apply to all targets by default. - `icon`: Path to an SVG icon (relative to extension root) for display in menus. +### Environment Variables + +Environment variables can be configured at two levels: + +1. **Agent-level** (`[agent_servers.my-agent.env]`): Variables that apply to all platforms +2. **Target-level** (`[agent_servers.my-agent.targets.{platform}.env]`): Variables specific to a platform + +When both are specified, target-level environment variables override agent-level variables with the same name. Variables defined only at the agent level are inherited by all targets. 
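To make the merge behavior concrete, here is a small sketch (the agent name, variable name, and values are made up for illustration):

```toml
# Agent-level env: inherited by every target
[agent_servers.my-agent.env]
AGENT_LOG_LEVEL = "info"

# Target-level env: overrides the agent-level value of the same name on this target only
[agent_servers.my-agent.targets.linux-x86_64.env]
AGENT_LOG_LEVEL = "debug"
```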
+ ### Complete Example Here's a more complete example with all optional fields: @@ -79,6 +89,9 @@ archive = "https://github.com/example/agent/releases/download/v2.0.0/agent-linux cmd = "./bin/agent" args = ["serve", "--port", "8080"] sha256 = "def456abc123..." + +[agent_servers.example-agent.targets.linux-x86_64.env] +AGENT_MEMORY_LIMIT = "2GB" # Linux-specific override ``` ## Installation Process diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index e86533c0c0c357147d291894167b149d99af5d1d..dc8a69329176c8dbb7f9785913ae4b7aac6fb230 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -165,7 +165,15 @@ To update an extension, open a PR to [the `zed-industries/extensions` repo](http In your PR do the following: -1. Update the extension's submodule to the commit of the new version. +1. Update the extension's submodule to the commit of the new version. For this, you can run + +```sh +# From the root of the repository: +git submodule update --remote extensions/your-extension-name +``` + +to update your extension to the latest commit available in your remote repository. + 2. Update the `version` field for the extension in `extensions.toml` - Make sure the `version` matches the one set in `extension.toml` at the particular commit. diff --git a/docs/src/extensions/installing-extensions.md b/docs/src/extensions/installing-extensions.md index 801fe5c55c0f47530e2656cd831619d1457ba13e..d9573556f0d4faeedbfdebbe72e51ad17fbfbb57 100644 --- a/docs/src/extensions/installing-extensions.md +++ b/docs/src/extensions/installing-extensions.md @@ -8,6 +8,7 @@ Here you can view the extensions that you currently have installed or search and - On macOS, extensions are installed in `~/Library/Application Support/Zed/extensions`. - On Linux, they are installed in either `$XDG_DATA_HOME/zed/extensions` or `~/.local/share/zed/extensions`. +- On Windows, the directory is `%LOCALAPPDATA%\Zed\extensions`. This directory contains two subdirectories: diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 7eb6a355dbfcafaa01ca885789d41e28c474d2f4..f3ffcd71ba8122956636cd1d228f885383cb83e6 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -154,6 +154,14 @@ This query identifies opening and closing brackets, braces, and quotation marks. | @open | Captures opening brackets, braces, and quotes | | @close | Captures closing brackets, braces, and quotes | +Zed uses these to highlight matching brackets: painting each bracket pair with a different color ("rainbow brackets") and highlighting the brackets if the cursor is inside the bracket pair. + +To opt out of rainbow brackets colorization, add the following to the corresponding `brackets.scm` entry: + +```scheme +(("\"" @open "\"" @close) (#set! rainbow.exclude)) +``` + ### Code outline/structure The `outline.scm` file defines the structure for the code outline. diff --git a/docs/src/git.md b/docs/src/git.md index 85781e37bc628ac493a048b5b7d16d1fbd758d72..8a94a79973b390f1d4e8075469b610d51b6f2016 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -29,6 +29,23 @@ In the panel you can see the state of your project at a glance—which repositor Zed monitors your repository so that changes you make on the command line are instantly reflected. +### Configuration + +You can configure how Zed hard wraps commit messages with the `preferred-line-length` setting of the "Git Commit" language. 
The default is `72`, but it can be set to any number of characters `0` or more. + +The Git Panel also allows configuring the `soft_wrap` setting to adjust how commit messages display while you are typing them in the Git Panel. The default setting is `editor_width`, however, `none`, `preferred_line_length`, and `bounded` are also options. + +#### Example + +```json +"languages": { + "Git Commit": { + "soft_wrap": "editor_width", + "preferred_line_length": 72 + }, +} +``` + ## Project Diff You can see all of the changes captured by Git in Zed by opening the Project Diff ({#kb git::Diff}), accessible via the {#action git::Diff} action in the Command Palette or the Git Panel. @@ -75,6 +92,12 @@ Zed offers two commit textareas: As soon as you commit in Zed, in the Git Panel, you'll see a bar right under the commit textarea, which will show the recently submitted commit. In there, you can use the "Uncommit" button, which performs the `git reset HEADˆ--soft` command. +### Configuring Commit Line Length + +By default, Zed sets the commit line length to `72` but it can be configured in your local `settings.json` file. + +Find more information about setting the `preferred-line-length` in the [Configuration](#configuration) section. + ## Stashing Git stash allows you to temporarily save your uncommitted changes and revert your working directory to a clean state. This is particularly useful when you need to quickly switch branches or pull updates without committing incomplete work. @@ -122,7 +145,6 @@ You can specify your preferred model to use by providing a `commit_message_model ```json [settings] { "agent": { - "version": "2", "commit_message_model": { "provider": "anthropic", "model": "claude-3-5-haiku" @@ -146,6 +168,20 @@ Zed currently supports links to the hosted versions of [SourceHut](https://sr.ht) and [Codeberg](https://codeberg.org). +For self-hosted GitHub, GitLab, or Bitbucket instances, add them to the `git_hosting_providers` setting so commit hashes and permalinks resolve to your domain: + +```json [settings] +{ + "git_hosting_providers": [ + { + "provider": "gitlab", + "name": "Corp GitLab", + "base_url": "https://git.example.corp" + } + ] +} +``` + Zed also has a Copy Permalink feature to create a permanent link to a code snippet on your Git hosting service. These links are useful for sharing a specific line or range of lines in a file at a specific commit. Trigger this action via the [Command Palette](./getting-started.md#command-palette) (search for `permalink`), diff --git a/docs/src/installation.md b/docs/src/installation.md index 7802ef7776a78deefb196ab005297e1f54314ea6..7d2009e3a0266160ce4e13056287c36ef7660008 100644 --- a/docs/src/installation.md +++ b/docs/src/installation.md @@ -22,6 +22,12 @@ brew install --cask zed@preview Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. 
+Additionally, you can install Zed using winget: + +```sh +winget install -e --id ZedIndustries.Zed +``` + ### Linux For most Linux users, the easiest way to install Zed is through our installation script: diff --git a/docs/src/languages/astro.md b/docs/src/languages/astro.md index 5691a0de4844b2e2d924713d523f4651da6fe984..cbfe8de74e7444e2e02f6240265e00eb043a2084 100644 --- a/docs/src/languages/astro.md +++ b/docs/src/languages/astro.md @@ -3,7 +3,7 @@ Astro support is available through the [Astro extension](https://github.com/zed-extensions/astro). - Tree-sitter: [virchau13/tree-sitter-astro](https://github.com/virchau13/tree-sitter-astro) -- Language Server: [withastro/language-tools](https://github.com/withastro/language-tools) +- Language Server: [withastro/language-tools](https://github.com/withastro/astro/tree/main/packages/language-tools/language-server) + +# Migration Research Notes + +## Completed Guides + +All three JetBrains migration guides have been populated with full content: + +1. **pycharm.md** - Python development, virtual environments, Ruff/Pyright, Django/Flask workflows +2. **webstorm.md** - JavaScript/TypeScript development, npm workflows, framework considerations +3. **rustrover.md** - Rust development, rust-analyzer parity, Cargo workflows, licensing notes + +## Key Sources Used + +- IntelliJ IDEA migration doc (structural template) +- JetBrains PyCharm Getting Started docs +- JetBrains WebStorm Getting Started docs +- JetBrains RustRover Quick Start Guide +- External community feedback (Reddit, Hacker News, Medium) + +## External Quotes Incorporated + +### WebStorm Guide + +> "I work for AWS and the applications I deal with are massive. Often I need to keep many projects open due to tight dependencies. I'm talking about complex microservices and micro frontend infrastructure which oftentimes lead to 2-15 minutes of indexing wait time whenever I open a project or build the system locally." + +### RustRover Guide + +- Noted rust-analyzer shared foundation between RustRover and Zed +- Addressed licensing/telemetry concerns that motivate some users to switch +- Included debugger caveats based on community feedback + +## Cross-Cutting Themes Applied to All Guides + +### Universal Pain Points Addressed + +1. Indexing (instant in Zed) +2. Resource usage (Zed is lightweight) +3. Startup time (Zed is near-instant) +4. 
UI clutter (Zed is minimal by design) + +### Universal Missing Features Documented + +- No project model / SDK management +- No database tools +- No framework-specific integration +- No visual run configurations (use tasks) +- No built-in HTTP client + +### JetBrains Keymap Emphasized + +All three guides emphasize: + +- Select JetBrains keymap during onboarding or in settings +- `Shift Shift` for Search Everywhere works +- Most familiar shortcuts preserved + +## Next Steps (Optional Enhancements) + +- [ ] Cross-link guides to JetBrains docs for users who want to reference original IDE features +- [ ] Add a consolidated "hub page" linking to all migration guides +- [ ] Consider adding VS Code migration guide using similar structure +- [ ] Review for tone consistency against Zed Documentation Guidelines diff --git a/docs/src/migrate/intellij.md b/docs/src/migrate/intellij.md new file mode 100644 index 0000000000000000000000000000000000000000..24c85774ec5686f605d1d781913d0873ac0abd7f --- /dev/null +++ b/docs/src/migrate/intellij.md @@ -0,0 +1,357 @@ +# How to Migrate from IntelliJ IDEA to Zed + +This guide covers how to set up Zed if you're coming from IntelliJ IDEA, including keybindings, settings, and the differences you should expect. + +## Install Zed + +Zed is available on macOS, Windows, and Linux. + +For macOS, you can download it from zed.dev/download, or install via Homebrew: + +```sh +brew install --cask zed +``` + +For Windows, download the installer from zed.dev/download, or install via winget: + +```sh +winget install Zed.Zed +``` + +For most Linux users, the easiest way to install Zed is through our installation script: + +```sh +curl -f https://zed.dev/install.sh | sh +``` + +After installation, you can launch Zed from your Applications folder (macOS), Start menu (Windows), or directly from the terminal using: +`zed .` +This opens the current directory in Zed. + +## Set Up the JetBrains Keymap + +If you're coming from IntelliJ, the fastest way to feel at home is to use the JetBrains keymap. During onboarding, you can select it as your base keymap. If you missed that step, you can change it anytime: + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Search for `Base Keymap` +3. Select `JetBrains` + +Or add this directly to your `settings.json`: + +```json +{ + "base_keymap": "JetBrains" +} +``` + +This maps familiar shortcuts like `Shift Shift` for Search Everywhere, `Cmd+O` for Go to Class, and `Cmd+Shift+A` for Find Action. + +## Set Up Editor Preferences + +You can configure settings manually in the Settings Editor. + +To edit your settings: + +1. `Cmd+,` to open the Settings Editor. +2. Run `zed: open settings` in the Command Palette. + +Settings IntelliJ users typically configure first: + +| Zed Setting | What it does | +| ----------------------- | ------------------------------------------------------------------------------- | +| `format_on_save` | Auto-format when saving. Set to `"on"` to enable. | +| `soft_wrap` | Wrap long lines. Options: `"none"`, `"editor_width"`, `"preferred_line_length"` | +| `preferred_line_length` | Column width for wrapping and rulers. Default is 80. | +| `inlay_hints` | Show parameter names and type hints inline, like IntelliJ's hints. | +| `relative_line_numbers` | Useful if you're coming from IdeaVim. | + +Zed also supports per-project settings. Create a `.zed/settings.json` file in your project root to override global settings for that project, similar to how you might use `.idea` folders in IntelliJ. 
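For example, a minimal project-level override (the values here are placeholders; adjust them to the project's conventions) might look like:

```json
{
  "format_on_save": "on",
  "preferred_line_length": 120,
  "tab_size": 2
}
```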
+ +> **Tip:** If you're joining an existing project, check `format_on_save` before making your first commit. Otherwise you might accidentally reformat an entire file when you only meant to change one line. + +## Open or Create a Project + +After setup, press `Cmd+Shift+O` (with JetBrains keymap) to open a folder. This becomes your workspace in Zed. Unlike IntelliJ, there's no project configuration wizard, no `.iml` files, and no SDK setup required. + +To start a new project, create a directory using your terminal or file manager, then open it in Zed. The editor will treat that folder as the root of your project. + +You can also launch Zed from the terminal inside any folder with: +`zed .` + +Once inside a project: + +- Use `Cmd+Shift+O` or `Cmd+E` to jump between files quickly (like IntelliJ's "Recent Files") +- Use `Cmd+Shift+A` or `Shift Shift` to open the Command Palette (like IntelliJ's "Search Everywhere") +- Use `Cmd+O` to search for symbols (like IntelliJ's "Go to Class") + +Open buffers appear as tabs across the top. The sidebar shows your file tree and Git status. Toggle it with `Cmd+1` (just like IntelliJ's Project tool window). + +## Differences in Keybindings + +If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference for how Zed compares to IntelliJ. + +### Common Shared Keybindings (Zed with JetBrains keymap ↔ IntelliJ) + +| Action | Shortcut | +| ----------------------------- | ----------------------- | +| Search Everywhere | `Shift Shift` | +| Find Action / Command Palette | `Cmd + Shift + A` | +| Go to File | `Cmd + Shift + O` | +| Go to Symbol / Class | `Cmd + O` | +| Recent Files | `Cmd + E` | +| Go to Definition | `Cmd + B` | +| Find Usages | `Alt + F7` | +| Rename Symbol | `Shift + F6` | +| Reformat Code | `Cmd + Alt + L` | +| Toggle Project Panel | `Cmd + 1` | +| Toggle Terminal | `Alt + F12` | +| Duplicate Line | `Cmd + D` | +| Delete Line | `Cmd + Backspace` | +| Move Line Up/Down | `Shift + Alt + Up/Down` | +| Expand/Shrink Selection | `Alt + Up/Down` | +| Comment Line | `Cmd + /` | +| Go Back / Forward | `Cmd + [` / `Cmd + ]` | +| Toggle Breakpoint | `Ctrl + F8` | + +### Different Keybindings (IntelliJ → Zed) + +| Action | IntelliJ | Zed (JetBrains keymap) | +| ---------------------- | ----------- | ------------------------ | +| File Structure | `Cmd + F12` | `Cmd + F12` (outline) | +| Navigate to Next Error | `F2` | `F2` | +| Run | `Ctrl + R` | `Ctrl + Alt + R` (tasks) | +| Debug | `Ctrl + D` | `Alt + Shift + F9` | +| Stop | `Cmd + F2` | `Ctrl + F2` | + +### Unique to Zed + +| Action | Shortcut | Notes | +| ----------------- | -------------------------- | ------------------------------ | +| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | + +### How to Customize Keybindings + +- Open the Command Palette (`Cmd+Shift+A` or `Shift Shift`) +- Run `Zed: Open Keymap Editor` + +This opens a list of all available bindings. You can override individual shortcuts or remove conflicts. + +Zed also supports key sequences (multi-key shortcuts). + +## Differences in User Interfaces + +### No Indexing + +If you've used IntelliJ on large projects, you know the wait: "Indexing..." can take anywhere from 30 seconds to 15 minutes depending on project size. IntelliJ builds a comprehensive index of your entire codebase to power its code intelligence, and it re-indexes when dependencies change or after builds. 
+ +Zed doesn't index. You open a folder and start working immediately. File search and navigation work instantly regardless of project size. + +IntelliJ's index powers features like finding all usages across your entire codebase, understanding class hierarchies, and detecting dead code. Zed delegates this work to language servers, which may not analyze at the same depth. + +**How to adapt:** + +- For project-wide symbol search, use `Cmd+O` / Go to Symbol (relies on your language server) +- For finding files by name, use `Cmd+Shift+O` / Go to File +- For text search across files, use `Cmd+Shift+F`—this is fast even on large codebases +- If you need deep static analysis for JVM code, consider running IntelliJ's inspections as a separate step or using standalone tools like Checkstyle, PMD, or SpotBugs + +### LSP vs. Native Language Intelligence + +IntelliJ has its own language analysis engine built from scratch for each supported language. For Java, Kotlin, and other JVM languages, this engine understands your code thoroughly: it resolves types, tracks data flow, knows about framework annotations, and offers dozens of specialized refactorings. + +Zed uses the Language Server Protocol (LSP) for code intelligence. Each language has its own server: `jdtls` for Java, `rust-analyzer` for Rust, and so on. + +For some languages, the LSP experience is excellent. TypeScript, Rust, and Go have mature language servers that provide fast, accurate completions, diagnostics, and refactorings. For JVM languages, the gap might be more noticeable. The Eclipse-based Java language server is capable, but it won't match IntelliJ's depth for things like: + +- Spring and Jakarta EE annotation processing +- Complex refactorings (extract interface, pull members up, change signature with all callers) +- Framework-aware inspections +- Automatic import optimization with custom ordering rules + +**How to adapt:** + +- Use `Alt+Enter` for available code actions—the list will vary by language server +- For Java, ensure `jdtls` is properly configured with your JDK path in settings + +### No Project Model + +IntelliJ manages projects through `.idea` folders containing XML configuration files, `.iml` module definitions, SDK assignments, and run configurations. This model enables IntelliJ to understand multi-module projects, manage dependencies automatically, and persist complex run/debug setups. + +Zed has no project model. A project is a folder. There's no wizard, no SDK selection screen, no module configuration. + +This means: + +- Build commands are manual. Zed doesn't detect Maven or Gradle projects. +- Run configurations don't exist. You define tasks or use the terminal. +- SDK management is external. Your language server uses whatever JDK is on your PATH. +- There are no module boundaries. Zed sees folders, not project structure. 
+ +**How to adapt:** + +- Create a `.zed/settings.json` in your project root for project-specific settings +- Define common commands in `tasks.json` (open via Command Palette: `zed: open tasks`): + +```json +[ + { + "label": "build", + "command": "./gradlew build" + }, + { + "label": "run", + "command": "./gradlew bootRun" + }, + { + "label": "test current file", + "command": "./gradlew test --tests $ZED_STEM" + } +] +``` + +- Use `Ctrl+Alt+R` to run tasks quickly +- Lean on your terminal (`Alt+F12`) for anything tasks don't cover +- For multi-module projects, you can open each module as a separate Zed window, or open the root and navigate via file finder + +### No Framework Integration + +IntelliJ's value for enterprise Java development comes largely from its framework integration. Spring beans are understood and navigable. JPA entities get special treatment. Endpoints are indexed and searchable. Jakarta EE annotations modify how the IDE analyzes your code. + +Zed has none of this. The language server sees Java code as Java code, so it doesn't understand that `@Autowired` means something special or that this class is a REST controller. + +Similarly for other ecosystems: no Rails integration, no Django awareness, no Angular/React-specific tooling beyond what the TypeScript language server provides. + +**How to adapt:** + +- Use grep and file search liberally. `Cmd+Shift+F` with a regex can find endpoint definitions, bean names, or annotation usages. +- Rely on your language server's "find references" (`Alt+F7`) for navigation—it works, just without framework context +- For Spring Boot, keep the Actuator endpoints or a separate tool for understanding bean wiring +- Consider using framework-specific CLI tools (Spring CLI, Rails generators) from Zed's terminal + +> **Tip:** For database work, pick up a dedicated tool like DataGrip, DBeaver, or TablePlus. Many developers who switch to Zed keep DataGrip around specifically for SQL—it integrates well with your existing JetBrains license. + +If your daily work depends heavily on framework-aware navigation and refactoring, you'll feel the gap. Zed works best when you're comfortable navigating code through search rather than specialized tooling, or when your language has strong LSP support that covers most of what you need. + +### Tool Windows vs. Docks + +IntelliJ organizes auxiliary views into numbered tool windows (Project = 1, Git = 9, Terminal = Alt+F12, etc.). Zed uses a similar concept called "docks": + +| IntelliJ Tool Window | Zed Equivalent | Shortcut (JetBrains keymap) | +| -------------------- | -------------- | --------------------------- | +| Project (1) | Project Panel | `Cmd + 1` | +| Git (9 or Cmd+0) | Git Panel | `Cmd + 0` | +| Terminal (Alt+F12) | Terminal Panel | `Alt + F12` | +| Structure (7) | Outline Panel | `Cmd + 7` | +| Problems (6) | Diagnostics | `Cmd + 6` | +| Debug (5) | Debug Panel | `Cmd + 5` | + +Zed has three dock positions: left, bottom, and right. Panels can be moved between docks by dragging or through settings. + +> **Tip:** IntelliJ has an "Override IDE shortcuts" setting that lets terminal shortcuts like `Ctrl+Left/Right` work normally. In Zed, terminal keybindings are separate—check your keymap if familiar shortcuts aren't working in the terminal panel. 
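If you do need to restore a shell shortcut inside the terminal panel, you can scope a binding to the `Terminal` context in your keymap. The sketch below assumes the `terminal::SendText` action and the `\u001b` + `b` (Escape, b) sequence for "back one word"; treat both as assumptions and check Zed's terminal keybinding documentation for the exact action names your version supports:

```json
[
  {
    "context": "Terminal",
    "bindings": {
      // Assumption: send ESC+b so the shell moves back one word on Ctrl+Left
      "ctrl-left": ["terminal::SendText", "\u001bb"]
    }
  }
]
```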
+ +### Debugging + +Both IntelliJ and Zed offer integrated debugging, but the experience differs: + +- Zed's debugger uses the Debug Adapter Protocol (DAP), supporting multiple languages +- Set breakpoints with `Ctrl+F8` +- Start debugging with `Alt+Shift+F9` +- Step through code with `F7` (step into), `F8` (step over), `Shift+F8` (step out) +- Continue execution with `F9` + +The Debug Panel (`Cmd+5`) shows variables, call stack, and breakpoints—similar to IntelliJ's Debug tool window. + +### Extensions vs. Plugins + +IntelliJ has a massive plugin ecosystem covering everything from language support to database tools to deployment integrations. + +Zed's extension ecosystem is smaller and more focused: + +- Language support and syntax highlighting +- Themes +- Slash commands for AI +- Context servers + +Several features that require plugins in other editors are built into Zed: + +- Real-time collaboration with voice chat +- AI coding assistance +- Built-in terminal +- Task runner +- LSP-based code intelligence + +You won't find one-to-one replacements for every IntelliJ plugin, especially for framework-specific tools, database clients, or application server integrations. For those workflows, you may need to use external tools alongside Zed. + +## Collaboration in Zed vs. IntelliJ + +IntelliJ offers Code With Me as a separate plugin for collaboration. Zed has collaboration built into the core experience. + +- Open the Collab Panel in the left dock +- Create a channel and [invite your collaborators](https://zed.dev/docs/collaboration#inviting-a-collaborator) to join +- [Share your screen or your codebase](https://zed.dev/docs/collaboration#share-a-project) directly + +Once connected, you'll see each other's cursors, selections, and edits in real time. Voice chat is included. There's no need for separate tools or third-party logins. + +## Using AI in Zed + +If you're used to AI assistants in IntelliJ (like GitHub Copilot or JetBrains AI), Zed offers similar capabilities with more flexibility. + +### Configuring GitHub Copilot + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** + +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. + +### Additional AI Options + +To use other AI models in Zed, you have several options: + +- Use Zed's hosted models, with higher rate limits. Requires [authentication](https://zed.dev/docs/accounts.html) and subscription to [Zed Pro](https://zed.dev/docs/ai/subscription.html). +- Bring your own [API keys](https://zed.dev/docs/ai/llm-providers.html), no authentication needed +- Use [external agents like Claude Code](https://zed.dev/docs/ai/external-agents.html) + +## Advanced Config and Productivity Tweaks + +Zed exposes advanced settings for power users who want to fine-tune their environment. 
+ +Here are a few useful tweaks: + +**Format on Save:** + +```json +"format_on_save": "on" +``` + +**Enable direnv support:** + +```json +"load_direnv": "shell_hook" +``` + +**Configure language servers**: For Java development, you may want to configure the Java language server in your settings: + +```json +{ + "lsp": { + "jdtls": { + "settings": { + "java_home": "/path/to/jdk" + } + } + } +} +``` + +## Next Steps + +Now that you're set up, here are some resources to help you get the most out of Zed: + +- [Configuring Zed](../configuring-zed.md) — Customize settings, themes, and editor behavior +- [Key Bindings](../key-bindings.md) — Learn how to customize and extend your keymap +- [Tasks](../tasks.md) — Set up build and run commands for your projects +- [AI Features](../ai/overview.md) — Explore Zed's AI capabilities beyond code completion +- [Collaboration](../collaboration/overview.md) — Share your projects and code together in real time +- [Languages](../languages.md) — Language-specific setup guides, including Java and Kotlin diff --git a/docs/src/migrate/pycharm.md b/docs/src/migrate/pycharm.md new file mode 100644 index 0000000000000000000000000000000000000000..636bc69eeba1c09b3e0e8a0d74ccd859aedbb342 --- /dev/null +++ b/docs/src/migrate/pycharm.md @@ -0,0 +1,438 @@ +# How to Migrate from PyCharm to Zed + +This guide covers how to set up Zed if you're coming from PyCharm, including keybindings, settings, and the differences you should expect. + +## Install Zed + +Zed is available on macOS, Windows, and Linux. + +For macOS, you can download it from zed.dev/download, or install via Homebrew: + +```sh +brew install --cask zed +``` + +For Windows, download the installer from zed.dev/download, or install via winget: + +```sh +winget install Zed.Zed +``` + +For most Linux users, the easiest way to install Zed is through our installation script: + +```sh +curl -f https://zed.dev/install.sh | sh +``` + +After installation, you can launch Zed from your Applications folder (macOS), Start menu (Windows), or directly from the terminal using: +`zed .` +This opens the current directory in Zed. + +## Set Up the JetBrains Keymap + +If you're coming from PyCharm, the fastest way to feel at home is to use the JetBrains keymap. During onboarding, you can select it as your base keymap. If you missed that step, you can change it anytime: + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Search for `Base Keymap` +3. Select `JetBrains` + +Or add this directly to your `settings.json`: + +```json +{ + "base_keymap": "JetBrains" +} +``` + +This maps familiar shortcuts like `Shift Shift` for Search Everywhere, `Cmd+O` for Go to Class, and `Cmd+Shift+A` for Find Action. + +## Set Up Editor Preferences + +You can configure settings manually in the Settings Editor. + +To edit your settings: + +1. `Cmd+,` to open the Settings Editor. +2. Run `zed: open settings` in the Command Palette. + +Settings PyCharm users typically configure first: + +| Zed Setting | What it does | +| ----------------------- | ------------------------------------------------------------------------------- | +| `format_on_save` | Auto-format when saving. Set to `"on"` to enable. | +| `soft_wrap` | Wrap long lines. Options: `"none"`, `"editor_width"`, `"preferred_line_length"` | +| `preferred_line_length` | Column width for wrapping and rulers. Default is 80, PEP 8 recommends 79. | +| `inlay_hints` | Show parameter names and type hints inline, like PyCharm's hints. 
| +| `relative_line_numbers` | Useful if you're coming from IdeaVim. | + +Zed also supports per-project settings. Create a `.zed/settings.json` file in your project root to override global settings for that project, similar to how you might use `.idea` folders in PyCharm. + +> **Tip:** If you're joining an existing project, check `format_on_save` before making your first commit. Otherwise you might accidentally reformat an entire file when you only meant to change one line. + +## Open or Create a Project + +After setup, press `Cmd+Shift+O` (with JetBrains keymap) to open a folder. This becomes your workspace in Zed. Unlike PyCharm, there's no project configuration wizard, no interpreter selection dialog, and no project structure setup required. + +To start a new project, create a directory using your terminal or file manager, then open it in Zed. The editor will treat that folder as the root of your project. + +You can also launch Zed from the terminal inside any folder with: +`zed .` + +Once inside a project: + +- Use `Cmd+Shift+O` or `Cmd+E` to jump between files quickly (like PyCharm's "Recent Files") +- Use `Cmd+Shift+A` or `Shift Shift` to open the Command Palette (like PyCharm's "Search Everywhere") +- Use `Cmd+O` to search for symbols (like PyCharm's "Go to Symbol") + +Open buffers appear as tabs across the top. The sidebar shows your file tree and Git status. Toggle it with `Cmd+1` (just like PyCharm's Project tool window). + +## Differences in Keybindings + +If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference for how Zed compares to PyCharm. + +### Common Shared Keybindings + +| Action | Shortcut | +| ----------------------------- | ----------------------- | +| Search Everywhere | `Shift Shift` | +| Find Action / Command Palette | `Cmd + Shift + A` | +| Go to File | `Cmd + Shift + O` | +| Go to Symbol | `Cmd + O` | +| Recent Files | `Cmd + E` | +| Go to Definition | `Cmd + B` | +| Find Usages | `Alt + F7` | +| Rename Symbol | `Shift + F6` | +| Reformat Code | `Cmd + Alt + L` | +| Toggle Project Panel | `Cmd + 1` | +| Toggle Terminal | `Alt + F12` | +| Duplicate Line | `Cmd + D` | +| Delete Line | `Cmd + Backspace` | +| Move Line Up/Down | `Shift + Alt + Up/Down` | +| Expand/Shrink Selection | `Alt + Up/Down` | +| Comment Line | `Cmd + /` | +| Go Back / Forward | `Cmd + [` / `Cmd + ]` | +| Toggle Breakpoint | `Ctrl + F8` | + +### Different Keybindings (PyCharm → Zed) + +| Action | PyCharm | Zed (JetBrains keymap) | +| ---------------------- | ----------- | ------------------------ | +| File Structure | `Cmd + F12` | `Cmd + F12` (outline) | +| Navigate to Next Error | `F2` | `F2` | +| Run | `Ctrl + R` | `Ctrl + Alt + R` (tasks) | +| Debug | `Ctrl + D` | `Alt + Shift + F9` | +| Stop | `Cmd + F2` | `Ctrl + F2` | + +### Unique to Zed + +| Action | Shortcut | Notes | +| ----------------- | -------------------------- | ------------------------------ | +| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | + +### How to Customize Keybindings + +- Open the Command Palette (`Cmd+Shift+A` or `Shift Shift`) +- Run `Zed: Open Keymap Editor` + +This opens a list of all available bindings. You can override individual shortcuts or remove conflicts. + +Zed also supports key sequences (multi-key shortcuts). 
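For example, a two-key sequence is written as space-separated keystrokes in `keymap.json`. The action name below is illustrative; use the Keymap Editor to look up the exact identifier you want to bind:

```json
[
  {
    "context": "Editor",
    "bindings": {
      // Press Cmd+K, then Cmd+U (illustrative action name)
      "cmd-k cmd-u": "editor::ConvertToUpperCase"
    }
  }
]
```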
+ +## Differences in User Interfaces + +### No Indexing + +If you've used PyCharm on large projects, you know the wait: "Indexing..." can take anywhere from 30 seconds to several minutes depending on project size and dependencies. PyCharm builds a comprehensive index of your entire codebase to power its code intelligence, and it re-indexes when dependencies change or when you install new packages. + +Zed doesn't index. You open a folder and start working immediately. File search and navigation work instantly regardless of project size. For many PyCharm users, this alone is reason enough to switch—no more waiting, no more "Indexing paused" interruptions. + +PyCharm's index powers features like finding all usages across your entire codebase, understanding class hierarchies, and detecting unused imports project-wide. Zed delegates this work to language servers, which may not analyze as deeply or as broadly. + +**How to adapt:** + +- For project-wide symbol search, use `Cmd+O` / Go to Symbol (relies on your language server) +- For finding files by name, use `Cmd+Shift+O` / Go to File +- For text search across files, use `Cmd+Shift+F`—this is fast even on large codebases +- For deep static analysis, consider running tools like `mypy`, `pylint`, or `ruff check` from the terminal + +### LSP vs. Native Language Intelligence + +PyCharm has its own language analysis engine built specifically for Python. This engine understands your code deeply: it resolves types without annotations, tracks data flow, knows about Django models and Flask routes, and offers specialized refactorings. + +Zed uses the Language Server Protocol (LSP) for code intelligence. For Python, Zed provides several language servers out of the box: + +- **basedpyright** (default) — Fast type checking and completions +- **Ruff** (default) — Linting and formatting +- **Ty** — Up-and-coming language server from Astral, built for speed +- **Pyright** — Microsoft's type checker +- **PyLSP** — Plugin-based server with tool integrations + +The LSP experience for Python is strong. basedpyright provides accurate completions, type checking, and navigation. Ruff handles formatting and linting with excellent performance. + +Where you might notice differences: + +- Framework-specific intelligence (Django ORM, Flask routes) isn't built-in +- Some complex refactorings (extract method with proper scope analysis) may be less sophisticated +- Auto-import suggestions depend on what the language server knows about your environment + +**How to adapt:** + +- Use `Alt+Enter` for available code actions—the list will vary by language server +- Ensure your virtual environment is selected so the language server can resolve your dependencies +- Use Ruff for fast, consistent formatting (it's enabled by default) +- For code inspection similar to PyCharm's "Inspect Code," run `ruff check .` or check the Diagnostics panel (`Cmd+6`)—basedpyright and Ruff together catch many of the same issues + +### Virtual Environments and Interpreters + +In PyCharm, you select a Python interpreter through a GUI, and PyCharm manages the connection between your project and that interpreter. It shows available packages, lets you install new ones, and keeps track of which environment each project uses. 
+ +Zed handles virtual environments through its toolchain system: + +- Zed automatically discovers virtual environments in common locations (`.venv`, `venv`, `.env`, `env`) +- When a virtual environment is detected, the terminal auto-activates it +- Language servers are automatically configured to use the discovered environment +- You can manually select a toolchain if auto-detection picks the wrong one + +**How to adapt:** + +- Create your virtual environment with `python -m venv .venv` or `uv sync` +- Open the folder in Zed—it will detect the environment automatically +- If you need to switch environments, use the toolchain selector +- For conda environments, ensure they're activated in your shell before launching Zed + +> **Tip:** If basedpyright shows import errors for packages you've installed, check that Zed has selected the correct virtual environment. Use the toolchain selector to verify or change the active environment. + +### No Project Model + +PyCharm manages projects through `.idea` folders containing XML configuration files, interpreter assignments, and run configurations. This model lets PyCharm remember your interpreter choice, manage dependencies through the UI, and persist complex run/debug setups. + +Zed has no project model. A project is a folder. There's no wizard, no interpreter selection screen, no project structure configuration. + +This means: + +- Run configurations don't exist. You define tasks or use the terminal. Your existing PyCharm run configs in `.idea/` won't be read—you'll recreate the ones you need in `tasks.json`. +- Interpreter management is external. Zed discovers environments but doesn't create them. +- Dependencies are managed through pip, uv, poetry, or conda—not through the editor. +- There's no Python Console (interactive REPL) panel. Use `python` or `ipython` in the terminal instead. + +**How to adapt:** + +- Create a `.zed/settings.json` in your project root for project-specific settings +- Define common commands in `tasks.json` (open via Command Palette: `zed: open tasks`): + +```json +[ + { + "label": "run", + "command": "python main.py" + }, + { + "label": "test", + "command": "pytest" + }, + { + "label": "test current file", + "command": "pytest $ZED_FILE" + } +] +``` + +- Use `Ctrl+Alt+R` to run tasks quickly +- Lean on your terminal (`Alt+F12`) for anything tasks don't cover + +### No Framework Integration + +PyCharm Professional's value for web development comes largely from its framework integration. Django templates are understood and navigable. Flask routes are indexed. SQLAlchemy models get special treatment. Template variables autocomplete. + +Zed has none of this. The language server sees Python code as Python code—it doesn't understand that `@app.route` defines an endpoint or that a Django model class creates database tables. + +**How to adapt:** + +- Use grep and file search liberally. `Cmd+Shift+F` with a regex can find route definitions, model classes, or template usages. +- Rely on your language server's "find references" (`Alt+F7`) for navigation—it works, just without framework context +- Consider using framework-specific CLI tools (`python manage.py`, `flask routes`) from Zed's terminal + +> **Tip:** For database work, pick up a dedicated tool like DataGrip, DBeaver, or TablePlus. Many developers who switch to Zed keep DataGrip around specifically for SQL. + +### Tool Windows vs. Docks + +PyCharm organizes auxiliary views into numbered tool windows (Project = 1, Python Console = 4, Terminal = Alt+F12, etc.). 
Zed uses a similar concept called "docks": + +| PyCharm Tool Window | Zed Equivalent | Shortcut (JetBrains keymap) | +| ------------------- | -------------- | --------------------------- | +| Project (1) | Project Panel | `Cmd + 1` | +| Git (9 or Cmd+0) | Git Panel | `Cmd + 0` | +| Terminal (Alt+F12) | Terminal Panel | `Alt + F12` | +| Structure (7) | Outline Panel | `Cmd + 7` | +| Problems (6) | Diagnostics | `Cmd + 6` | +| Debug (5) | Debug Panel | `Cmd + 5` | + +Zed has three dock positions: left, bottom, and right. Panels can be moved between docks by dragging or through settings. + +### Debugging + +Both PyCharm and Zed offer integrated debugging, but the experience differs: + +- Zed uses `debugpy` (the same debug adapter that VS Code uses) +- Set breakpoints with `Ctrl+F8` +- Start debugging with `Alt+Shift+F9` or press `F4` and select a debug target +- Step through code with `F7` (step into), `F8` (step over), `Shift+F8` (step out) +- Continue execution with `F9` + +Zed can automatically detect debuggable entry points. Press `F4` to see available options, including: + +- Python scripts +- Modules +- pytest tests + +For more control, create a `.zed/debug.json` file: + +```json +[ + { + "label": "Debug Current File", + "adapter": "Debugpy", + "program": "$ZED_FILE", + "request": "launch" + }, + { + "label": "Debug Flask App", + "adapter": "Debugpy", + "request": "launch", + "module": "flask", + "args": ["run", "--debug"], + "env": { + "FLASK_APP": "app.py" + } + } +] +``` + +### Running Tests + +PyCharm has a dedicated test runner with a visual interface showing pass/fail status for each test. Zed provides test running through: + +- **Gutter icons** — Click the play button next to test functions or classes +- **Tasks** — Define pytest or unittest commands in `tasks.json` +- **Terminal** — Run `pytest` directly + +The test output appears in the terminal panel. For pytest, use `--tb=short` for concise tracebacks or `-v` for verbose output. + +### Extensions vs. Plugins + +PyCharm has a plugin ecosystem covering everything from additional language support to database tools to deployment integrations. + +Zed's extension ecosystem is smaller and more focused: + +- Language support and syntax highlighting +- Themes +- Slash commands for AI +- Context servers + +Several features that require plugins in PyCharm are built into Zed: + +- Real-time collaboration with voice chat +- AI coding assistance +- Built-in terminal +- Task runner +- LSP-based code intelligence +- Ruff formatting and linting + +### What's Not in Zed + +To set expectations clearly, here's what PyCharm offers that Zed doesn't have: + +- **Scientific Mode / Jupyter integration** — For notebooks and data science workflows, use JupyterLab or VS Code with the Jupyter extension alongside Zed for your Python editing +- **Database tools** — Use DataGrip, DBeaver, or TablePlus +- **Django/Flask template navigation** — Use file search and grep +- **Visual package manager** — Use pip, uv, or poetry from the terminal +- **Remote interpreters** — Zed has remote development, but it works differently +- **Profiler integration** — Use cProfile, py-spy, or similar tools externally + +## Collaboration in Zed vs. PyCharm + +PyCharm offers Code With Me as a separate plugin for collaboration. Zed has collaboration built into the core experience. 
+ +- Open the Collab Panel in the left dock +- Create a channel and [invite your collaborators](https://zed.dev/docs/collaboration#inviting-a-collaborator) to join +- [Share your screen or your codebase](https://zed.dev/docs/collaboration#share-a-project) directly + +Once connected, you'll see each other's cursors, selections, and edits in real time. Voice chat is included. There's no need for separate tools or third-party logins. + +## Using AI in Zed + +If you're used to AI assistants in PyCharm (like GitHub Copilot or JetBrains AI Assistant), Zed offers similar capabilities with more flexibility. + +### Configuring GitHub Copilot + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** + +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. + +### Additional AI Options + +To use other AI models in Zed, you have several options: + +- Use Zed's hosted models, with higher rate limits. Requires [authentication](https://zed.dev/docs/accounts.html) and subscription to [Zed Pro](https://zed.dev/docs/ai/subscription.html). +- Bring your own [API keys](https://zed.dev/docs/ai/llm-providers.html), no authentication needed +- Use [external agents like Claude Code](https://zed.dev/docs/ai/external-agents.html) + +## Advanced Config and Productivity Tweaks + +Zed exposes advanced settings for power users who want to fine-tune their environment. + +Here are a few useful tweaks: + +**Format on Save:** + +```json +"format_on_save": "on" +``` + +**Enable direnv support (useful for Python projects using direnv):** + +```json +"load_direnv": "shell_hook" +``` + +**Customize virtual environment detection:** + +```json +{ + "terminal": { + "detect_venv": { + "on": { + "directories": [".venv", "venv", ".env", "env"], + "activate_script": "default" + } + } + } +} +``` + +**Configure basedpyright type checking strictness:** + +If you find basedpyright too strict or too lenient, configure it in your project's `pyrightconfig.json`: + +```json +{ + "typeCheckingMode": "basic" +} +``` + +Options are `"off"`, `"basic"`, `"standard"` (default), `"strict"`, or `"all"`. + +## Next Steps + +Now that you're set up, here are some resources to help you get the most out of Zed: + +- [Configuring Zed](../configuring-zed.md) — Customize settings, themes, and editor behavior +- [Key Bindings](../key-bindings.md) — Learn how to customize and extend your keymap +- [Tasks](../tasks.md) — Set up build and run commands for your projects +- [AI Features](../ai/overview.md) — Explore Zed's AI capabilities beyond code completion +- [Collaboration](../collaboration/overview.md) — Share your projects and code together in real time +- [Python in Zed](../languages/python.md) — Python-specific setup and configuration diff --git a/docs/src/migrate/rustrover.md b/docs/src/migrate/rustrover.md new file mode 100644 index 0000000000000000000000000000000000000000..4d0e85cfe9b981243044290929070e87876987d3 --- /dev/null +++ b/docs/src/migrate/rustrover.md @@ -0,0 +1,501 @@ +# How to Migrate from RustRover to Zed + +This guide covers how to set up Zed if you're coming from RustRover, including keybindings, settings, and the differences you should expect as a Rust developer. + +## Install Zed + +Zed is available on macOS, Windows, and Linux. 
+ +For macOS, you can download it from zed.dev/download, or install via Homebrew: + +```sh +brew install --cask zed +``` + +For Windows, download the installer from zed.dev/download, or install via winget: + +```sh +winget install Zed.Zed +``` + +For most Linux users, the easiest way to install Zed is through our installation script: + +```sh +curl -f https://zed.dev/install.sh | sh +``` + +After installation, you can launch Zed from your Applications folder (macOS), Start menu (Windows), or directly from the terminal using: +`zed .` +This opens the current directory in Zed. + +## Set Up the JetBrains Keymap + +If you're coming from RustRover, the fastest way to feel at home is to use the JetBrains keymap. During onboarding, you can select it as your base keymap. If you missed that step, you can change it anytime: + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Search for `Base Keymap` +3. Select `JetBrains` + +Or add this directly to your `settings.json`: + +```json +{ + "base_keymap": "JetBrains" +} +``` + +This maps familiar shortcuts like `Shift Shift` for Search Everywhere, `Cmd+O` for Go to Class, and `Cmd+Shift+A` for Find Action. + +## Set Up Editor Preferences + +You can configure settings manually in the Settings Editor. + +To edit your settings: + +1. `Cmd+,` to open the Settings Editor. +2. Run `zed: open settings` in the Command Palette. + +Settings RustRover users typically configure first: + +| Zed Setting | What it does | +| ----------------------- | ------------------------------------------------------------------------------- | +| `format_on_save` | Auto-format when saving. Set to `"on"` to enable (uses rustfmt by default). | +| `soft_wrap` | Wrap long lines. Options: `"none"`, `"editor_width"`, `"preferred_line_length"` | +| `preferred_line_length` | Column width for wrapping and rulers. Rust convention is 100. | +| `inlay_hints` | Show type hints, parameter names, and chaining hints inline. | +| `relative_line_numbers` | Useful if you're coming from IdeaVim. | + +Zed also supports per-project settings. Create a `.zed/settings.json` file in your project root to override global settings for that project, similar to how you might use `.idea` folders in RustRover. + +> **Tip:** If you're joining an existing project, check `format_on_save` before making your first commit. Otherwise you might accidentally reformat an entire file when you only meant to change one line. + +## Open or Create a Project + +After setup, press `Cmd+Shift+O` (with JetBrains keymap) to open a folder. This becomes your workspace in Zed. Unlike RustRover, there's no project configuration wizard, no toolchain selection dialog, and no Cargo project setup screen. + +To start a new project, use Cargo from the terminal: + +```sh +cargo new my_project +cd my_project +zed . +``` + +Or for a library: + +```sh +cargo new --lib my_library +``` + +You can also launch Zed from the terminal inside any existing Cargo project with: +`zed .` + +Once inside a project: + +- Use `Cmd+Shift+O` or `Cmd+E` to jump between files quickly (like RustRover's "Recent Files") +- Use `Cmd+Shift+A` or `Shift Shift` to open the Command Palette (like RustRover's "Search Everywhere") +- Use `Cmd+O` to search for symbols (like RustRover's "Go to Symbol") + +Open buffers appear as tabs across the top. The sidebar shows your file tree and Git status. Toggle it with `Cmd+1` (just like RustRover's Project tool window). 
+ +## Differences in Keybindings + +If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference for how Zed compares to RustRover. + +### Common Shared Keybindings + +| Action | Shortcut | +| ----------------------------- | ----------------------- | +| Search Everywhere | `Shift Shift` | +| Find Action / Command Palette | `Cmd + Shift + A` | +| Go to File | `Cmd + Shift + O` | +| Go to Symbol | `Cmd + O` | +| Recent Files | `Cmd + E` | +| Go to Definition | `Cmd + B` | +| Find Usages | `Alt + F7` | +| Rename Symbol | `Shift + F6` | +| Reformat Code | `Cmd + Alt + L` | +| Toggle Project Panel | `Cmd + 1` | +| Toggle Terminal | `Alt + F12` | +| Duplicate Line | `Cmd + D` | +| Delete Line | `Cmd + Backspace` | +| Move Line Up/Down | `Shift + Alt + Up/Down` | +| Expand/Shrink Selection | `Alt + Up/Down` | +| Comment Line | `Cmd + /` | +| Go Back / Forward | `Cmd + [` / `Cmd + ]` | +| Toggle Breakpoint | `Ctrl + F8` | + +### Different Keybindings (RustRover → Zed) + +| Action | RustRover | Zed (JetBrains keymap) | +| ---------------------- | ----------- | ------------------------ | +| File Structure | `Cmd + F12` | `Cmd + F12` (outline) | +| Navigate to Next Error | `F2` | `F2` | +| Run | `Ctrl + R` | `Ctrl + Alt + R` (tasks) | +| Debug | `Ctrl + D` | `Alt + Shift + F9` | +| Stop | `Cmd + F2` | `Ctrl + F2` | +| Expand Macro | `Alt+Enter` | `Cmd + Shift + M` | + +### Unique to Zed + +| Action | Shortcut | Notes | +| ----------------- | -------------------------- | ------------------------------ | +| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | + +### How to Customize Keybindings + +- Open the Command Palette (`Cmd+Shift+A` or `Shift Shift`) +- Run `Zed: Open Keymap Editor` + +This opens a list of all available bindings. You can override individual shortcuts or remove conflicts. + +Zed also supports key sequences (multi-key shortcuts). + +## Differences in User Interfaces + +### No Indexing + +RustRover indexes your project when you first open it to build a model of your codebase. This process runs whenever you open a project or when dependencies change via Cargo. + +Zed skips the indexing step. You open a folder and start working right away. Since both editors rely on rust-analyzer for Rust intelligence, the analysis still happens—but in Zed it runs in the background without blocking the UI or showing modal progress dialogs. + +**How to adapt:** + +- Use `Cmd+O` to search symbols across your crate (rust-analyzer handles this) +- Jump to files by name with `Cmd+Shift+O` +- `Cmd+Shift+F` gives you fast text search across the entire project +- For linting and deeper checks, run `cargo clippy` in the terminal + +### rust-analyzer: Shared Foundation, Different Integration + +Here's what makes the RustRover-to-Zed transition unique: **both editors use rust-analyzer** for Rust language intelligence. This means the core code analysis—completions, go-to-definition, find references, type inference—is fundamentally the same. + +RustRover integrates rust-analyzer into its JetBrains platform, adding a GUI layer, additional refactorings, and its own indexing on top. Zed uses rust-analyzer more directly through the Language Server Protocol (LSP). 
+ +What this means for you: + +- **Completions** — Same quality, powered by rust-analyzer +- **Type inference** — Identical, it's the same engine +- **Go to definition / Find usages** — Works the same way +- **Macro expansion** — Available in both (use `Cmd+Shift+M` in Zed) +- **Inlay hints** — Both support type hints, parameter hints, and chaining hints + +Where you might notice differences: + +- Some refactorings available in RustRover may not have rust-analyzer equivalents +- RustRover's GUI for configuring rust-analyzer is replaced by JSON configuration in Zed +- RustRover-specific inspections (beyond Clippy) won't exist in Zed + +**How to adapt:** + +- Use `Alt+Enter` for available code actions—rust-analyzer provides many +- Configure rust-analyzer settings in `.zed/settings.json` for project-specific needs +- Run `cargo clippy` for linting (it integrates with rust-analyzer diagnostics) + +### No Project Model + +RustRover manages projects through `.idea` folders containing XML configuration files, toolchain assignments, and run configurations. The Cargo tool window provides a visual interface for your project structure, targets, and dependencies. + +Zed keeps it simpler: a project is a folder with a `Cargo.toml`. No project wizard, no toolchain dialogs, no visual Cargo management layer. + +In practice: + +- Run configurations don't carry over. Your `.idea/` setup stays behind—define the commands you need in `tasks.json` instead. +- Toolchains are managed externally via `rustup`. +- Dependencies live in `Cargo.toml`. Edit the file directly; rust-analyzer provides completions for crate names and versions. + +**How to adapt:** + +- Create a `.zed/settings.json` in your project root for project-specific settings +- Define common commands in `tasks.json` (open via Command Palette: `zed: open tasks`): + +```json +[ + { + "label": "cargo run", + "command": "cargo run" + }, + { + "label": "cargo build", + "command": "cargo build" + }, + { + "label": "cargo test", + "command": "cargo test" + }, + { + "label": "cargo clippy", + "command": "cargo clippy" + }, + { + "label": "cargo run --release", + "command": "cargo run --release" + } +] +``` + +- Use `Ctrl+Alt+R` to run tasks quickly +- Lean on your terminal (`Alt+F12`) for anything tasks don't cover + +### No Cargo Integration UI + +RustRover's Cargo tool window provides visual access to your project's targets, dependencies, and common Cargo commands. You can run builds, tests, and benchmarks with a click. + +Zed doesn't have a Cargo GUI. You work with Cargo through: + +- **Terminal** — Run any Cargo command directly +- **Tasks** — Define shortcuts for common commands +- **Gutter icons** — Run tests and binaries with clickable icons + +**How to adapt:** + +- Get comfortable with Cargo CLI commands: `cargo build`, `cargo run`, `cargo test`, `cargo clippy`, `cargo doc` +- Use tasks for commands you run frequently +- For dependency management, edit `Cargo.toml` directly (rust-analyzer provides completions for crate names and versions) + +### Tool Windows vs. Docks + +RustRover organizes auxiliary views into numbered tool windows (Project = 1, Cargo = Alt+1, Terminal = Alt+F12, etc.). 
Zed uses a similar concept called "docks": + +| RustRover Tool Window | Zed Equivalent | Shortcut (JetBrains keymap) | +| --------------------- | -------------- | --------------------------- | +| Project (1) | Project Panel | `Cmd + 1` | +| Git (9 or Cmd+0) | Git Panel | `Cmd + 0` | +| Terminal (Alt+F12) | Terminal Panel | `Alt + F12` | +| Structure (7) | Outline Panel | `Cmd + 7` | +| Problems (6) | Diagnostics | `Cmd + 6` | +| Debug (5) | Debug Panel | `Cmd + 5` | + +Zed has three dock positions: left, bottom, and right. Panels can be moved between docks by dragging or through settings. + +Note that there's no dedicated Cargo tool window in Zed. Use the terminal or define tasks for your common Cargo commands. + +### Debugging + +Both RustRover and Zed offer integrated debugging for Rust, but using different backends: + +- RustRover uses its own debugger integration +- Zed uses **CodeLLDB** (the same debug adapter popular in VS Code) + +To debug Rust code in Zed: + +- Set breakpoints with `Ctrl+F8` +- Start debugging with `Alt+Shift+F9` or press `F4` and select a debug target +- Step through code with `F7` (step into), `F8` (step over), `Shift+F8` (step out) +- Continue execution with `F9` + +Zed can automatically detect debuggable targets in your Cargo project. Press `F4` to see available options. + +For more control, create a `.zed/debug.json` file: + +```json +[ + { + "label": "Debug Binary", + "adapter": "CodeLLDB", + "request": "launch", + "program": "${workspaceFolder}/target/debug/my_project" + }, + { + "label": "Debug Tests", + "adapter": "CodeLLDB", + "request": "launch", + "cargo": { + "args": ["test", "--no-run"], + "filter": { + "kind": "test" + } + } + }, + { + "label": "Debug with Arguments", + "adapter": "CodeLLDB", + "request": "launch", + "program": "${workspaceFolder}/target/debug/my_project", + "args": ["--config", "dev.toml"] + } +] +``` + +> **Note:** Some users have reported that RustRover's debugger can have issues with variable inspection and breakpoints in certain scenarios. CodeLLDB in Zed provides a solid alternative, though debugging Rust can be challenging in any editor due to optimizations and macro-generated code. + +### Running Tests + +RustRover has a dedicated test runner with a visual interface showing pass/fail status for each test. Zed provides test running through: + +- **Gutter icons** — Click the play button next to `#[test]` functions or test modules +- **Tasks** — Define `cargo test` commands in `tasks.json` +- **Terminal** — Run `cargo test` directly + +The test output appears in the terminal panel. For more detailed output, use: + +- `cargo test -- --nocapture` to see println! output +- `cargo test -- --test-threads=1` for sequential test execution +- `cargo test specific_test_name` to run a single test + +### Extensions vs. Plugins + +RustRover has a plugin ecosystem, though it's more limited than other JetBrains IDEs since Rust support is built-in. 
+ +Zed's extension ecosystem is smaller and more focused: + +- Language support and syntax highlighting +- Themes +- Slash commands for AI +- Context servers + +Several features that might require plugins in other editors are built into Zed: + +- Real-time collaboration with voice chat +- AI coding assistance +- Built-in terminal +- Task runner +- rust-analyzer integration +- rustfmt formatting + +### What's Not in Zed + +To set expectations clearly, here's what RustRover offers that Zed doesn't have: + +- **Cargo.toml GUI editor** — Edit the file directly (rust-analyzer helps with completions) +- **Visual dependency management** — Use `cargo add`, `cargo remove`, or edit `Cargo.toml` +- **Profiler integration** — Use `cargo flamegraph`, `perf`, or external profiling tools +- **Database tools** — Use DataGrip, DBeaver, or TablePlus +- **HTTP Client** — Use tools like `curl`, `httpie`, or Postman +- **Coverage visualization** — Use `cargo tarpaulin` or `cargo llvm-cov` externally + +## A Note on Licensing and Telemetry + +If you're moving from RustRover partly due to licensing concerns or telemetry policies, you should know: + +- **Zed is open source** (MIT licensed for the editor, AGPL for collaboration services) +- **Telemetry is optional** and can be disabled during onboarding or in settings +- **No license tiers**: All features are available to everyone + +## Collaboration in Zed vs. RustRover + +RustRover offers Code With Me as a separate feature for collaboration. Zed has collaboration built into the core experience. + +- Open the Collab Panel in the left dock +- Create a channel and [invite your collaborators](https://zed.dev/docs/collaboration#inviting-a-collaborator) to join +- [Share your screen or your codebase](https://zed.dev/docs/collaboration#share-a-project) directly + +Once connected, you'll see each other's cursors, selections, and edits in real time. Voice chat is included. There's no need for separate tools or third-party logins. + +## Using AI in Zed + +If you're used to AI assistants in RustRover (like JetBrains AI Assistant), Zed offers similar capabilities with more flexibility. + +### Configuring GitHub Copilot + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** + +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. + +### Additional AI Options + +To use other AI models in Zed, you have several options: + +- Use Zed's hosted models, with higher rate limits. Requires [authentication](https://zed.dev/docs/accounts.html) and subscription to [Zed Pro](https://zed.dev/docs/ai/subscription.html). +- Bring your own [API keys](https://zed.dev/docs/ai/llm-providers.html), no authentication needed +- Use [external agents like Claude Code](https://zed.dev/docs/ai/external-agents.html) + +## Advanced Config and Productivity Tweaks + +Zed exposes advanced settings for power users who want to fine-tune their environment. 
+ +Here are a few useful tweaks for Rust developers: + +**Format on Save (uses rustfmt by default):** + +```json +"format_on_save": "on" +``` + +**Configure inlay hints for Rust:** + +```json +{ + "inlay_hints": { + "enabled": true, + "show_type_hints": true, + "show_parameter_hints": true, + "show_other_hints": true + } +} +``` + +**Configure rust-analyzer settings:** + +```json +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "checkOnSave": { + "command": "clippy" + }, + "cargo": { + "allFeatures": true + }, + "procMacro": { + "enable": true + } + } + } + } +} +``` + +**Use a separate target directory for rust-analyzer (faster builds):** + +```json +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "rust-analyzer.cargo.targetDir": true + } + } + } +} +``` + +This tells rust-analyzer to use `target/rust-analyzer` instead of `target`, so IDE analysis doesn't conflict with your manual `cargo build` commands. + +**Enable direnv support (useful for Rust projects using direnv):** + +```json +"load_direnv": "shell_hook" +``` + +**Configure linked projects for workspaces:** + +If you work with multiple Cargo projects that aren't in a workspace, you can tell rust-analyzer about them: + +```json +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "linkedProjects": ["./project-a/Cargo.toml", "./project-b/Cargo.toml"] + } + } + } +} +``` + +## Next Steps + +Now that you're set up, here are some resources to help you get the most out of Zed: + +- [Configuring Zed](../configuring-zed.md) — Customize settings, themes, and editor behavior +- [Key Bindings](../key-bindings.md) — Learn how to customize and extend your keymap +- [Tasks](../tasks.md) — Set up build and run commands for your projects +- [AI Features](../ai/overview.md) — Explore Zed's AI capabilities beyond code completion +- [Collaboration](../collaboration/overview.md) — Share your projects and code together in real time +- [Rust in Zed](../languages/rust.md) — Rust-specific setup and configuration diff --git a/docs/src/migrate/vs-code.md b/docs/src/migrate/vs-code.md new file mode 100644 index 0000000000000000000000000000000000000000..dd7419e3ff31dc2892c19b68bf91b340eab24576 --- /dev/null +++ b/docs/src/migrate/vs-code.md @@ -0,0 +1,373 @@ +# How to Migrate from VS Code to Zed + +This guide is for developers who spent serious time in VS Code and want to try Zed without starting from scratch. + +If you’re here, you might be looking for a faster editor. Or something less cluttered. Or you’re curious about built-in collaboration. Whatever brought you here, this guide helps you move over your habits, shortcuts, and settings. + +We’ll cover what to bring, what to change, and what’s different. You can ease in gradually or switch all at once. Either way, you’ll stay productive. + +## Install Zed + +Zed is available on macOS, Windows, and Linux. + +For macOS, you can download it from zed.dev/download, or install via Homebrew: +`brew install zed-editor/zed/zed` + +For most Linux users, the easiest way to install Zed is through our installation script: +`curl -f https://zed.dev/install.sh | sh` + +After installation, you can launch Zed from your Applications folder (macOS) or directly from the terminal (Linux) using: +`zed .` +This opens the current directory in Zed. + +## Import Settings from VS Code + +During setup, you have the option to import key settings from VS Code. 
Zed imports the following settings: + +### Settings Imported from VS Code + +The following VS Code settings are automatically imported when you use **Import Settings from VS Code**: + +**Editor** + +| VS Code Setting | Zed Setting | +| ------------------------------------------- | ---------------------------------------------- | +| `editor.fontFamily` | `buffer_font_family` | +| `editor.fontSize` | `buffer_font_size` | +| `editor.fontWeight` | `buffer_font_weight` | +| `editor.tabSize` | `tab_size` | +| `editor.insertSpaces` | `hard_tabs` (inverted) | +| `editor.wordWrap` | `soft_wrap` | +| `editor.wordWrapColumn` | `preferred_line_length` | +| `editor.cursorStyle` | `cursor_shape` | +| `editor.cursorBlinking` | `cursor_blink` | +| `editor.renderLineHighlight` | `current_line_highlight` | +| `editor.lineNumbers` | `gutter.line_numbers`, `relative_line_numbers` | +| `editor.showFoldingControls` | `gutter.folds` | +| `editor.minimap.enabled` | `minimap.show` | +| `editor.minimap.autohide` | `minimap.show` | +| `editor.minimap.showSlider` | `minimap.thumb` | +| `editor.minimap.maxColumn` | `minimap.max_width_columns` | +| `editor.stickyScroll.enabled` | `sticky_scroll.enabled` | +| `editor.scrollbar.horizontal` | `scrollbar.axes.horizontal` | +| `editor.scrollbar.vertical` | `scrollbar.axes.vertical` | +| `editor.mouseWheelScrollSensitivity` | `scroll_sensitivity` | +| `editor.fastScrollSensitivity` | `fast_scroll_sensitivity` | +| `editor.cursorSurroundingLines` | `vertical_scroll_margin` | +| `editor.hover.enabled` | `hover_popover_enabled` | +| `editor.hover.delay` | `hover_popover_delay` | +| `editor.parameterHints.enabled` | `auto_signature_help` | +| `editor.multiCursorModifier` | `multi_cursor_modifier` | +| `editor.selectionHighlight` | `selection_highlight` | +| `editor.roundedSelection` | `rounded_selection` | +| `editor.find.seedSearchStringFromSelection` | `seed_search_query_from_cursor` | +| `editor.rulers` | `wrap_guides` | +| `editor.renderWhitespace` | `show_whitespaces` | +| `editor.guides.indentation` | `indent_guides.enabled` | +| `editor.linkedEditing` | `linked_edits` | +| `editor.autoSurround` | `use_auto_surround` | +| `editor.formatOnSave` | `format_on_save` | +| `editor.formatOnPaste` | `auto_indent_on_paste` | +| `editor.formatOnType` | `use_on_type_format` | +| `editor.trimAutoWhitespace` | `remove_trailing_whitespace_on_save` | +| `editor.suggestOnTriggerCharacters` | `show_completions_on_input` | +| `editor.suggest.showWords` | `completions.words` | +| `editor.inlineSuggest.enabled` | `show_edit_predictions` | + +**Files & Workspace** + +| VS Code Setting | Zed Setting | +| --------------------------- | ------------------------------ | +| `files.autoSave` | `autosave` | +| `files.autoSaveDelay` | `autosave.milliseconds` | +| `files.insertFinalNewline` | `ensure_final_newline_on_save` | +| `files.associations` | `file_types` | +| `files.watcherExclude` | `file_scan_exclusions` | +| `files.watcherInclude` | `file_scan_inclusions` | +| `files.simpleDialog.enable` | `use_system_path_prompts` | +| `search.smartCase` | `use_smartcase_search` | +| `search.useIgnoreFiles` | `search.include_ignored` | + +**Terminal** + +| VS Code Setting | Zed Setting | +| ------------------------------------- | ----------------------------------- | +| `terminal.integrated.fontFamily` | `terminal.font_family` | +| `terminal.integrated.fontSize` | `terminal.font_size` | +| `terminal.integrated.lineHeight` | `terminal.line_height` | +| `terminal.integrated.cursorStyle` | 
`terminal.cursor_shape` | +| `terminal.integrated.cursorBlinking` | `terminal.blinking` | +| `terminal.integrated.copyOnSelection` | `terminal.copy_on_select` | +| `terminal.integrated.scrollback` | `terminal.max_scroll_history_lines` | +| `terminal.integrated.macOptionIsMeta` | `terminal.option_as_meta` | +| `terminal.integrated.{platform}Exec` | `terminal.shell` | +| `terminal.integrated.env.{platform}` | `terminal.env` | + +**Tabs & Panels** + +| VS Code Setting | Zed Setting | +| -------------------------------------------------- | -------------------------------------------------- | +| `workbench.editor.showTabs` | `tab_bar.show` | +| `workbench.editor.showIcons` | `tabs.file_icons` | +| `workbench.editor.tabActionLocation` | `tabs.close_position` | +| `workbench.editor.tabActionCloseVisibility` | `tabs.show_close_button` | +| `workbench.editor.focusRecentEditorAfterClose` | `tabs.activate_on_close` | +| `workbench.editor.enablePreview` | `preview_tabs.enabled` | +| `workbench.editor.enablePreviewFromQuickOpen` | `preview_tabs.enable_preview_from_file_finder` | +| `workbench.editor.enablePreviewFromCodeNavigation` | `preview_tabs.enable_preview_from_code_navigation` | +| `workbench.editor.editorActionsLocation` | `tab_bar.show_tab_bar_buttons` | +| `workbench.editor.limit.enabled` / `value` | `max_tabs` | +| `workbench.editor.restoreViewState` | `restore_on_file_reopen` | +| `workbench.statusBar.visible` | `status_bar.show` | + +**Project Panel (File Explorer)** + +| VS Code Setting | Zed Setting | +| ------------------------------ | ----------------------------------- | +| `explorer.compactFolders` | `project_panel.auto_fold_dirs` | +| `explorer.autoReveal` | `project_panel.auto_reveal_entries` | +| `explorer.excludeGitIgnore` | `project_panel.hide_gitignore` | +| `problems.decorations.enabled` | `project_panel.show_diagnostics` | +| `explorer.decorations.badges` | `project_panel.git_status` | + +**Git** + +| VS Code Setting | Zed Setting | +| ------------------------------------ | ---------------------------------------------- | +| `git.enabled` | `git_panel.button` | +| `git.defaultBranchName` | `git_panel.fallback_branch_name` | +| `git.decorations.enabled` | `git.inline_blame`, `project_panel.git_status` | +| `git.blame.editorDecoration.enabled` | `git.inline_blame.enabled` | + +**Window & Behavior** + +| VS Code Setting | Zed Setting | +| ------------------------------------------------ | ---------------------------------------- | +| `window.confirmBeforeClose` | `confirm_quit` | +| `window.nativeTabs` | `use_system_window_tabs` | +| `window.closeWhenEmpty` | `when_closing_with_no_tabs` | +| `accessibility.dimUnfocused.enabled` / `opacity` | `active_pane_modifiers.inactive_opacity` | + +**Other** + +| VS Code Setting | Zed Setting | +| -------------------------- | -------------------------------------------------------- | +| `http.proxy` | `proxy` | +| `npm.packageManager` | `node.npm_path` | +| `telemetry.telemetryLevel` | `telemetry.metrics`, `telemetry.diagnostics` | +| `outline.icons` | `outline_panel.file_icons`, `outline_panel.folder_icons` | +| `chat.agent.enabled` | `agent.enabled` | +| `mcp` | `context_servers` | + +Zed doesn’t import extensions or keybindings, but this is the fastest way to get a familiar feel while trying something new. 
If you skip that step during setup, you can still import settings manually later via the command palette: + +`Cmd+Shift+P → Zed: Import VS Code Settings` + +## Set Up Editor Preferences + +You can also configure settings manually in the Settings Editor. + +To edit your settings: + +1. `Cmd+,` to open the Settings Editor. +2. Run `zed: open settings` in the Command Palette. + +Here’s how common VS Code settings translate: +| VS Code | Zed | Notes | +| --- | --- | --- | +| editor.fontFamily | buffer_font_family | Zed uses Zed Mono by default | +| editor.fontSize | buffer_font_size | Set in pixels | +| editor.tabSize | tab_size | Can override per language | +| editor.insertSpaces | insert_spaces | Boolean | +| editor.formatOnSave | format_on_save | Works with formatter enabled | +| editor.wordWrap | soft_wrap | Supports optional wrap column | + +Zed also supports per-project settings. You can find these in the Settings Editor as well. + +## Open or Create a Project + +After setup, press `Cmd+O` (`Ctrl+O` on Linux) to open a folder. This becomes your workspace in Zed. There's no support for multi-root workspaces or `.code-workspace` files like in VS Code. Zed keeps it simple: one folder, one workspace. + +To start a new project, create a directory using your terminal or file manager, then open it in Zed. The editor will treat that folder as the root of your project. + +You can also launch Zed from the terminal inside any folder with: +`zed .` + +Once inside a project, use `Cmd+P` to jump between files quickly. `Cmd+Shift+P` (`Ctrl+Shift+P` on Linux) opens the command palette for running actions / tasks, toggling settings, or starting a collaboration session. + +Open buffers appear as tabs across the top. The sidebar shows your file tree and Git status. Collapse it with `Cmd+B` for a distraction-free view. + +## Differences in Keybindings + +If you chose the VS Code keymap during onboarding, you're likely good to go, and most of your shortcuts should already feel familiar. +Here’s a quick reference guide for how our keybindings compare to what you’re used to coming from VS Code. 
+ +### Common Shared Keybindings (Zed <> VS Code) + +| Action | Shortcut | +| --------------------------- | ---------------------- | +| Find files | `Cmd + P` | +| Run a command | `Cmd + Shift + P` | +| Search text (project-wide) | `Cmd + Shift + F` | +| Find symbols (project-wide) | `Cmd + T` | +| Find symbols (file-wide) | `Cmd + Shift + O` | +| Toggle left dock | `Cmd + B` | +| Toggle bottom dock | `Cmd + J` | +| Open terminal | `Ctrl + ~` | +| Open file tree explorer | `Cmd + Shift + E` | +| Close current buffer | `Cmd + W` | +| Close whole project | `Cmd + Shift + W` | +| Refactor: rename symbol | `F2` | +| Change theme | `Cmd + K, Cmd + T` | +| Wrap text | `Opt + Z` | +| Navigate open tabs | `Cmd + Opt + Arrow` | +| Syntactic fold / unfold | `Cmd + Opt + {` or `}` | + +### Different Keybindings (Zed <> VS Code) + +| Action | VS Code | Zed | +| ------------------- | --------------------- | ---------------------- | +| Open recent project | `Ctrl + R` | `Cmd + Opt + O` | +| Move lines up/down | `Opt + Up/Down` | `Cmd + Ctrl + Up/Down` | +| Split panes | `Cmd + \` | `Cmd + K, Arrow Keys` | +| Expand Selection | `Shift + Alt + Right` | `Opt + Up` | + +### Unique to Zed + +| Action | Shortcut | Notes | +| ------------------- | ---------------------------- | ------------------------------------------------ | +| Toggle right dock | `Cmd + R` or `Cmd + Alt + B` | | +| Syntactic selection | `Opt + Up/Down` | Selects code by structure (e.g., inside braces). | + +### How to Customize Keybindings + +To edit your keybindings: + +- Open the command palette (`Cmd+Shift+P`) +- Run `Zed: Open Keymap Editor` + +This opens a list of all available bindings. You can override individual shortcuts, remove conflicts, or build a layout that works better for your setup. + +Zed also supports chords (multi-key sequences) like `Cmd+K Cmd+C`, like VS Code does. + +## Differences in User Interfaces + +### No Workspace + +VS Code uses a dedicated Workspace concept, with multi-root folders, `.code-workspace` files, and a clear distinction between “a window” and “a workspace.” +Zed simplifies this model. + +In Zed: + +- There is no workspace file format. Opening a folder is your project context. + +- Zed does not support multi-root workspaces. You can only open one folder at a time in a window. + +- Most project-level behavior is scoped to the folder you open. Search, Git integration, tasks, and environment detection all treat the opened directory as the project root. + +- Per-project settings are optional. You can add a `.zed/settings.json` file inside a project to override global settings, but Zed does not use `.code-workspace` files and won’t import them. + +- You can start from a single file or an empty window. Zed doesn’t require you to open a folder to begin editing. + +The result is a simpler model: +Open a folder → work inside that folder → no additional workspace layer. + +### Navigating in a Project + +In VS Code, the standard entry point is opening a folder. From there, the left-hand sidebar is central to your navigation. +Zed takes a different approach: + +- You can still open folders, but you don’t need to. Opening a single file or even starting with an empty workspace is valid. +- The Command Palette (`Cmd+Shift+P`) and File Finder (`Cmd+P`) are your primary navigation tools. The File Finder searches across the entire workspace instantly; files, symbols, commands, even teammates if you're collaborating. 
+- Instead of a persistent sidebar, Zed encourages you to: + - Fuzzy-find files by name (`Cmd+P`) + - Jump directly to symbols (`Cmd+Shift+O`) + - Use split panes and tabs for context, rather than keeping a large file tree open (though you can do this with the Project Panel if you prefer). + +The UI is intentionally minimal. Panels slide in only when needed, then get out of your way. The focus is on flowing between code instead of managing panes. + +### Extensions vs. Marketplace + +Zed does not offer as many extensions as VS Code. The available extensions are focused on language support, themes, syntax highlighting, and other core editing enhancements. + +However there are several features that typically require extensions in VS Code which we built directly into Zed: + +- Real-time collaboration with voice and cursor sharing (no Live Share required) +- AI coding assistance (no Copilot extension needed) +- Built-in terminal panel +- Project-wide fuzzy search +- Task runner with JSON config +- Inline diagnostics and code actions via LSP + +You won’t find one-to-one replacements for every VS Code extension, especially if you rely on tools for DevOps, containers, or test runners. Zed's extension ecosystem is still growing, and the catalog is smaller by design. + +### Collaboration in Zed vs. VS Code + +Unlike VS Code, Zed doesn’t require an extension to collaborate. It’s built into the core experience. + +- Open the Collab Panel in the left dock. +- Create a channel and [invite your collaborators](https://zed.dev/docs/collaboration#inviting-a-collaborator) to join. +- [Share your screen or your codebase](https://zed.dev/docs/collaboration#share-a-project) directly. + +Once connected, you’ll see each other's cursors, selections, and edits in real time. Voice chat is included, so you can talk as you work. There’s no need for separate tools or third-party logins. Zed’s collaboration is designed for everything from quick pair programming to longer team sessions. + +Learn how [Zed uses Zed](https://zed.dev/blog/zed-is-our-office) to plan work and collaborate. + +### Using AI in Zed + +If you’re used to GitHub Copilot in VS Code, you can do the same in Zed. You can also explore other agents through Zed Pro, or bring your own keys and connect without authentication. Zed is designed to enable many options for using AI, including disabling it entirely. + +#### Configuring GitHub Copilot + +You should be able to sign-in to GitHub Copilot by clicking on the Zeta icon in the status bar and following the setup instructions. +You can also add this to your settings: + +```json +{ + "features": { + "edit_prediction_provider": "copilot" + } +} +``` + +To invoke completions, just start typing. Zed will offer suggestions inline for you to accept. + +#### Additional AI Options + +To use other AI models in Zed, you have several options: + +- Use Zed’s hosted models, with higher rate limits. Requires [authentication](https://zed.dev/docs/accounts.html) and subscription to [Zed Pro](https://zed.dev/docs/ai/subscription.html). +- Bring your own [API keys](https://zed.dev/docs/ai/llm-providers.html), no authentication needed +- Use [external agents like Claude Code](https://zed.dev/docs/ai/external-agents.html). + +### Advanced Config and Productivity Tweaks + +Zed exposes advanced settings for power users who want to fine-tune their environment. 
+ +Here are a few useful tweaks: + +**Format on Save:** + +```json +"format_on_save": "on" +``` + +**Enable direnv support:** + +```json +"load_direnv": "shell_hook" +``` + +**Custom Tasks**: Define build or run commands in your `tasks.json` (accessed via command palette: `zed: open tasks`): + +```json +[ + { + "label": "build", + "command": "cargo build" + } +] +``` + +**Bring over custom snippets** +Copy your VS Code snippet JSON directly into Zed's snippets folder (`zed: configure snippets`). diff --git a/docs/src/migrate/webstorm.md b/docs/src/migrate/webstorm.md new file mode 100644 index 0000000000000000000000000000000000000000..78b80b355b47370a821f08fd6108d947182f0acf --- /dev/null +++ b/docs/src/migrate/webstorm.md @@ -0,0 +1,455 @@ +# How to Migrate from WebStorm to Zed + +This guide covers how to set up Zed if you're coming from WebStorm, including keybindings, settings, and the differences you should expect as a JavaScript/TypeScript developer. + +## Install Zed + +Zed is available on macOS, Windows, and Linux. + +For macOS, you can download it from zed.dev/download, or install via Homebrew: + +```sh +brew install --cask zed +``` + +For Windows, download the installer from zed.dev/download, or install via winget: + +```sh +winget install Zed.Zed +``` + +For most Linux users, the easiest way to install Zed is through our installation script: + +```sh +curl -f https://zed.dev/install.sh | sh +``` + +After installation, you can launch Zed from your Applications folder (macOS), Start menu (Windows), or directly from the terminal using: +`zed .` +This opens the current directory in Zed. + +## Set Up the JetBrains Keymap + +If you're coming from WebStorm, the fastest way to feel at home is to use the JetBrains keymap. During onboarding, you can select it as your base keymap. If you missed that step, you can change it anytime: + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Search for `Base Keymap` +3. Select `JetBrains` + +Or add this directly to your `settings.json`: + +```json +{ + "base_keymap": "JetBrains" +} +``` + +This maps familiar shortcuts like `Shift Shift` for Search Everywhere, `Cmd+O` for Go to Class, and `Cmd+Shift+A` for Find Action. + +## Set Up Editor Preferences + +You can configure settings manually in the Settings Editor. + +To edit your settings: + +1. `Cmd+,` to open the Settings Editor. +2. Run `zed: open settings` in the Command Palette. + +Settings WebStorm users typically configure first: + +| Zed Setting | What it does | +| ----------------------- | ------------------------------------------------------------------------------- | +| `format_on_save` | Auto-format when saving. Set to `"on"` to enable. | +| `soft_wrap` | Wrap long lines. Options: `"none"`, `"editor_width"`, `"preferred_line_length"` | +| `preferred_line_length` | Column width for wrapping and rulers. Default is 80. | +| `inlay_hints` | Show parameter names and type hints inline, like WebStorm's hints. | +| `relative_line_numbers` | Useful if you're coming from IdeaVim. | + +Zed also supports per-project settings. Create a `.zed/settings.json` file in your project root to override global settings for that project, similar to how you might use `.idea` folders in WebStorm. + +> **Tip:** If you're joining an existing project, check `format_on_save` before making your first commit. Otherwise you might accidentally reformat an entire file when you only meant to change one line. 
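+
+For a concrete starting point, here is a minimal sketch of such a `.zed/settings.json`, combining the settings from the table above; the values are illustrative only and should be adapted to your project:
+
+```json
+{
+  // Format with the configured formatter whenever you save
+  "format_on_save": "on",
+  // Wrap long lines at the preferred line length below
+  "soft_wrap": "preferred_line_length",
+  "preferred_line_length": 100,
+  // Inline parameter/type hints, similar to WebStorm's hints
+  "inlay_hints": {
+    "enabled": true
+  }
+}
+```
+
+Project settings like these are merged on top of your global settings, so you only need to list the keys you want to override.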
+ +## Open or Create a Project + +After setup, press `Cmd+Shift+O` (with JetBrains keymap) to open a folder. This becomes your workspace in Zed. Unlike WebStorm, there's no project configuration wizard, no framework selection dialog, and no project structure setup required. + +To start a new project, create a directory using your terminal or file manager, then open it in Zed. The editor will treat that folder as the root of your project. For new projects, you'd typically run `npm init`, `pnpm create`, or your framework's CLI tool first, then open the resulting folder in Zed. + +You can also launch Zed from the terminal inside any folder with: +`zed .` + +Once inside a project: + +- Use `Cmd+Shift+O` or `Cmd+E` to jump between files quickly (like WebStorm's "Recent Files") +- Use `Cmd+Shift+A` or `Shift Shift` to open the Command Palette (like WebStorm's "Search Everywhere") +- Use `Cmd+O` to search for symbols (like WebStorm's "Go to Symbol") + +Open buffers appear as tabs across the top. The sidebar shows your file tree and Git status. Toggle it with `Cmd+1` (just like WebStorm's Project tool window). + +## Differences in Keybindings + +If you chose the JetBrains keymap during onboarding, most of your shortcuts should already feel familiar. Here's a quick reference for how Zed compares to WebStorm. + +### Common Shared Keybindings + +| Action | Shortcut | +| ----------------------------- | ----------------------- | +| Search Everywhere | `Shift Shift` | +| Find Action / Command Palette | `Cmd + Shift + A` | +| Go to File | `Cmd + Shift + O` | +| Go to Symbol | `Cmd + O` | +| Recent Files | `Cmd + E` | +| Go to Definition | `Cmd + B` | +| Find Usages | `Alt + F7` | +| Rename Symbol | `Shift + F6` | +| Reformat Code | `Cmd + Alt + L` | +| Toggle Project Panel | `Cmd + 1` | +| Toggle Terminal | `Alt + F12` | +| Duplicate Line | `Cmd + D` | +| Delete Line | `Cmd + Backspace` | +| Move Line Up/Down | `Shift + Alt + Up/Down` | +| Expand/Shrink Selection | `Alt + Up/Down` | +| Comment Line | `Cmd + /` | +| Go Back / Forward | `Cmd + [` / `Cmd + ]` | +| Toggle Breakpoint | `Ctrl + F8` | + +### Different Keybindings (WebStorm → Zed) + +| Action | WebStorm | Zed (JetBrains keymap) | +| ---------------------- | ----------- | ------------------------ | +| File Structure | `Cmd + F12` | `Cmd + F12` (outline) | +| Navigate to Next Error | `F2` | `F2` | +| Run | `Ctrl + R` | `Ctrl + Alt + R` (tasks) | +| Debug | `Ctrl + D` | `Alt + Shift + F9` | +| Stop | `Cmd + F2` | `Ctrl + F2` | + +### Unique to Zed + +| Action | Shortcut | Notes | +| ----------------- | -------------------------- | ------------------------------ | +| Toggle Right Dock | `Cmd + R` | Assistant panel, notifications | +| Split Panes | `Cmd + K`, then arrow keys | Create splits in any direction | + +### How to Customize Keybindings + +- Open the Command Palette (`Cmd+Shift+A` or `Shift Shift`) +- Run `Zed: Open Keymap Editor` + +This opens a list of all available bindings. You can override individual shortcuts or remove conflicts. + +Zed also supports key sequences (multi-key shortcuts). + +## Differences in User Interfaces + +### No Indexing + +If you've used WebStorm on large projects, you know the wait. Opening a project with many dependencies can mean watching "Indexing..." for anywhere from 30 seconds to several minutes. WebStorm indexes your entire codebase and `node_modules` to power its code intelligence, and re-indexes when dependencies change. + +Zed doesn't index. 
You open a folder and start coding immediately—no progress bars, no "Indexing paused" banners. File search and navigation stay fast regardless of project size or how many `node_modules` dependencies you have. + +WebStorm's index enables features like finding all usages across your entire codebase, tracking import hierarchies, and flagging unused exports project-wide. Zed relies on language servers for this analysis, which may not cover as much ground. + +**How to adapt:** + +- Search symbols across the project with `Cmd+O` (powered by the TypeScript language server) +- Find files by name with `Cmd+Shift+O` +- Use `Cmd+Shift+F` for text search—it stays fast even in large monorepos +- Run `tsc --noEmit` or `eslint .` from the terminal when you need deeper project-wide analysis + +### LSP vs. Native Language Intelligence + +WebStorm has its own JavaScript and TypeScript analysis engine built by JetBrains. This engine understands your code deeply: it resolves types, tracks data flow, knows about framework-specific patterns, and offers specialized refactorings. + +Zed uses the Language Server Protocol (LSP) for code intelligence. For JavaScript and TypeScript, Zed supports: + +- **vtsls** (default) — Fast TypeScript language server with excellent performance +- **typescript-language-server** — The standard TypeScript LSP implementation +- **ESLint** — Linting integration +- **Prettier** — Code formatting (built-in) + +The TypeScript LSP experience is mature and robust. You get accurate completions, type checking, go-to-definition, and find-references. The experience is comparable to VS Code, which uses the same underlying TypeScript services. + +Where you might notice differences: + +- Framework-specific intelligence (Angular templates, Vue SFCs) may be less integrated +- Some complex refactorings (extract component with proper imports) may be less sophisticated +- Auto-import suggestions depend on what the language server knows about your project + +**How to adapt:** + +- Use `Alt+Enter` for available code actions—the list will vary by language server +- Ensure your `tsconfig.json` is properly configured so the language server understands your project structure +- Use Prettier for consistent formatting (it's enabled by default for JS/TS) +- For code inspection similar to WebStorm's "Inspect Code," check the Diagnostics panel (`Cmd+6`)—ESLint and TypeScript together catch many of the same issues + +### No Project Model + +WebStorm manages projects through `.idea` folders containing XML configuration files, framework detection, and run configurations. This model lets WebStorm remember your project settings, manage npm scripts through the UI, and persist run/debug setups. + +Zed takes a different approach: a project is just a folder. There's no setup wizard, no framework detection dialog, no project structure to configure. + +What this means in practice: + +- Run configurations aren't a thing. Define reusable commands in `tasks.json` instead. Note that your existing `.idea/` configurations won't carry over—you'll set up the ones you need fresh. +- npm scripts live in the terminal. Run `npm run dev`, `pnpm build`, or `yarn test` directly—there's no dedicated npm panel. +- No framework detection. Zed treats React, Angular, Vue, and vanilla JS/TS the same way. 
+ +**How to adapt:** + +- Create a `.zed/settings.json` in your project root for project-specific settings +- Define common commands in `tasks.json` (open via Command Palette: `zed: open tasks`): + +```json +[ + { + "label": "dev", + "command": "npm run dev" + }, + { + "label": "build", + "command": "npm run build" + }, + { + "label": "test", + "command": "npm test" + }, + { + "label": "test current file", + "command": "npm test -- $ZED_FILE" + } +] +``` + +- Use `Ctrl+Alt+R` to run tasks quickly +- Lean on your terminal (`Alt+F12`) for anything tasks don't cover + +### No Framework Integration + +WebStorm's value for web development comes largely from its framework integration. React components get special treatment. Angular has dedicated tooling. Vue single-file components are fully understood. The npm tool window shows all your scripts. + +Zed has none of this built-in. The TypeScript language server sees your code as TypeScript—it doesn't understand that a function is a React component or that a file is an Angular service. + +**How to adapt:** + +- Use grep and file search liberally. `Cmd+Shift+F` with a regex can find component definitions, route configurations, or API endpoints. +- Rely on your language server's "find references" (`Alt+F7`) for navigation—it works, just without framework context +- Consider using framework-specific CLI tools (`ng`, `next`, `vite`) from Zed's terminal +- For React, JSX/TSX syntax and TypeScript types still provide good intelligence + +> **Tip:** For projects with complex configurations, keep your framework's documentation handy. Zed's speed comes with less hand-holding for framework-specific features. + +### Tool Windows vs. Docks + +WebStorm organizes auxiliary views into numbered tool windows (Project = 1, npm = Alt+F11, Terminal = Alt+F12, etc.). Zed uses a similar concept called "docks": + +| WebStorm Tool Window | Zed Equivalent | Shortcut (JetBrains keymap) | +| -------------------- | -------------- | --------------------------- | +| Project (1) | Project Panel | `Cmd + 1` | +| Git (9 or Cmd+0) | Git Panel | `Cmd + 0` | +| Terminal (Alt+F12) | Terminal Panel | `Alt + F12` | +| Structure (7) | Outline Panel | `Cmd + 7` | +| Problems (6) | Diagnostics | `Cmd + 6` | +| Debug (5) | Debug Panel | `Cmd + 5` | + +Zed has three dock positions: left, bottom, and right. Panels can be moved between docks by dragging or through settings. + +Note that there's no dedicated npm tool window in Zed. Use the terminal or define tasks for your common npm scripts. 
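+
+If you prefer to pin panels to a dock through settings rather than by dragging, a small sketch along these lines should work; it assumes the standard panel setting names (`project_panel`, `outline_panel`, `git_panel`, `terminal`), and the arrangement shown is just one possibility:
+
+```json
+{
+  // Keep the file tree and Git panel on the left, like WebStorm's tool windows
+  "project_panel": { "dock": "left" },
+  "git_panel": { "dock": "left" },
+  // Outline (structure view) on the right, terminal along the bottom
+  "outline_panel": { "dock": "right" },
+  "terminal": { "dock": "bottom" }
+}
+```
+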
+ +### Debugging + +Both WebStorm and Zed offer integrated debugging for JavaScript and TypeScript: + +- Zed uses `vscode-js-debug` (the same debug adapter that VS Code uses) +- Set breakpoints with `Ctrl+F8` +- Start debugging with `Alt+Shift+F9` or press `F4` and select a debug target +- Step through code with `F7` (step into), `F8` (step over), `Shift+F8` (step out) +- Continue execution with `F9` + +Zed can debug: + +- Node.js applications and scripts +- Chrome/browser JavaScript +- Jest, Mocha, Vitest, and other test frameworks +- Next.js (both server and client-side) + +For more control, create a `.zed/debug.json` file: + +```json +[ + { + "label": "Debug Current File", + "adapter": "JavaScript", + "program": "$ZED_FILE", + "request": "launch" + }, + { + "label": "Debug Node Server", + "adapter": "JavaScript", + "request": "launch", + "program": "${workspaceFolder}/src/server.js" + }, + { + "label": "Attach to Chrome", + "adapter": "JavaScript", + "request": "attach", + "port": 9222 + } +] +``` + +Zed also recognizes `.vscode/launch.json` configurations, so existing VS Code debug setups often work out of the box. + +### Running Tests + +WebStorm has a dedicated test runner with a visual interface showing pass/fail status for each test. Zed provides test running through: + +- **Gutter icons** — Click the play button next to test functions or describe blocks +- **Tasks** — Define test commands in `tasks.json` +- **Terminal** — Run `npm test`, `jest`, `vitest`, etc. directly + +Zed supports auto-detection for common test frameworks: + +- Jest +- Mocha +- Vitest +- Jasmine +- Bun test +- Node.js test runner + +The test output appears in the terminal panel. For Jest, use `--verbose` for detailed output or `--watch` for continuous testing during development. + +### Extensions vs. Plugins + +WebStorm has a plugin ecosystem covering additional language support, themes, and tool integrations. + +Zed's extension ecosystem is smaller and more focused: + +- Language support and syntax highlighting +- Themes +- Slash commands for AI +- Context servers + +Several features that require plugins in WebStorm are built into Zed: + +- Real-time collaboration with voice chat +- AI coding assistance +- Built-in terminal +- Task runner +- LSP-based code intelligence +- Prettier formatting +- ESLint integration + +### What's Not in Zed + +To set expectations clearly, here's what WebStorm offers that Zed doesn't have: + +- **npm tool window** — Use the terminal or tasks instead +- **HTTP Client** — Use tools like Postman, Insomnia, or curl +- **Database tools** — Use DataGrip, DBeaver, or TablePlus +- **Framework-specific tooling** (Angular schematics, React refactorings) — Use CLI tools +- **Visual package.json editor** — Edit the file directly +- **Built-in REST client** — Use external tools or extensions +- **Profiler integration** — Use Chrome DevTools or Node.js profiling tools + +## Collaboration in Zed vs. WebStorm + +WebStorm offers Code With Me as a separate feature for collaboration. Zed has collaboration built into the core experience. + +- Open the Collab Panel in the left dock +- Create a channel and [invite your collaborators](https://zed.dev/docs/collaboration#inviting-a-collaborator) to join +- [Share your screen or your codebase](https://zed.dev/docs/collaboration#share-a-project) directly + +Once connected, you'll see each other's cursors, selections, and edits in real time. Voice chat is included. There's no need for separate tools or third-party logins. 
+ +## Using AI in Zed + +If you're used to AI assistants in WebStorm (like GitHub Copilot, JetBrains AI Assistant, or Junie), Zed offers similar capabilities with more flexibility. + +### Configuring GitHub Copilot + +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** + +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. + +### Additional AI Options + +To use other AI models in Zed, you have several options: + +- Use Zed's hosted models, with higher rate limits. Requires [authentication](https://zed.dev/docs/accounts.html) and subscription to [Zed Pro](https://zed.dev/docs/ai/subscription.html). +- Bring your own [API keys](https://zed.dev/docs/ai/llm-providers.html), no authentication needed +- Use [external agents like Claude Code](https://zed.dev/docs/ai/external-agents.html) + +## Advanced Config and Productivity Tweaks + +Zed exposes advanced settings for power users who want to fine-tune their environment. + +Here are a few useful tweaks for JavaScript/TypeScript developers: + +**Format on Save:** + +```json +"format_on_save": "on" +``` + +**Configure Prettier as the default formatter:** + +```json +{ + "formatter": { + "external": { + "command": "prettier", + "arguments": ["--stdin-filepath", "{buffer_path}"] + } + } +} +``` + +**Enable ESLint code actions:** + +```json +{ + "lsp": { + "eslint": { + "settings": { + "codeActionOnSave": { + "rules": ["import/order"] + } + } + } + } +} +``` + +**Configure TypeScript strict mode hints:** + +In your `tsconfig.json`, enable strict mode for better type checking: + +```json +{ + "compilerOptions": { + "strict": true, + "noUncheckedIndexedAccess": true + } +} +``` + +**Enable direnv support (useful for projects using direnv for environment variables):** + +```json +"load_direnv": "shell_hook" +``` + +## Next Steps + +Now that you're set up, here are some resources to help you get the most out of Zed: + +- [Configuring Zed](../configuring-zed.md) — Customize settings, themes, and editor behavior +- [Key Bindings](../key-bindings.md) — Learn how to customize and extend your keymap +- [Tasks](../tasks.md) — Set up build and run commands for your projects +- [AI Features](../ai/overview.md) — Explore Zed's AI capabilities beyond code completion +- [Collaboration](../collaboration/overview.md) — Share your projects and code together in real time +- [JavaScript in Zed](../languages/javascript.md) — JavaScript-specific setup and configuration +- [TypeScript in Zed](../languages/typescript.md) — TypeScript-specific setup and configuration diff --git a/docs/src/performance.md b/docs/src/performance.md new file mode 100644 index 0000000000000000000000000000000000000000..544e39e94babbf9c335a847af8819ad5b00494d1 --- /dev/null +++ b/docs/src/performance.md @@ -0,0 +1,93 @@ +How to use our internal tools to profile and keep Zed fast. + +# Rough quick CPU profiling (Flamechart) + +See what the CPU spends the most time on. Strongly recommend you use +[samply](https://github.com/mstange/samply). It opens an interactive profile in +the browser (specifically a local instance of [firefox_profiler](https://profiler.firefox.com/)). + +See [samply](https://github.com/mstange/samply)'s README on how to install and run. + +The profile.json does not contain any symbols. Firefox profiler can add the local symbols to the profile for for. 
To do that, hit the upload local profile button in the top right corner. + +image + +# In-depth CPU profiling (Tracing) + +See how long each annotated function call took and its arguments (if +configured). + +Annotate any function you need to appear in the profile with `instrument`. For more +details see +[tracing-instrument](https://docs.rs/tracing/latest/tracing/attr.instrument.html): + +```rust +#[instrument(skip_all)] +fn should_appear_in_profile(kitty: Cat) { + sleep(QUITE_LONG) +} +``` + +Then compile Zed with `ZTRACING=1 cargo r --features tracy --release`. The release build is optional but highly recommended: like every program, Zed's performance characteristics change dramatically with optimizations. You do not want to chase slowdowns that do not exist in release. + +## One-time Setup/Building the profiler + +Download the profiler: +[linux x86_64](https://zed-tracy-import-miniprofiler.nyc3.digitaloceanspaces.com/tracy-profiler-linux-x86_64) +[macos aarch64](https://zed-tracy-import-miniprofiler.nyc3.digitaloceanspaces.com/tracy-profiler-0.13.0-macos-aarch64) + +### Alternative: Building it yourself + +- Clone the repo at git@github.com:wolfpld/tracy.git +- `cd profiler && mkdir build && cd build` +- Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..` +- Build the profiler: `ninja` +- [Optional] Move the profiler somewhere convenient, like `~/.local/bin` on Linux + +## Usage + +Open the profiler (`tracy-profiler`); you should see Zed in the list of `Discovered clients`. Click it. +image + +To find functions that take a long time, follow this image: +image + +# Task/Async profiling + +Get a profile of the Zed foreground executor and background executors. Check if +anything is blocking the foreground too long or taking too much (clock) time in +the background. + +The profiler always runs in the background. You can save a trace from its UI or +look at the results live. + +## Setup/Building the importer + +Download the importer: +[linux x86_64](https://zed-tracy-import-miniprofiler.nyc3.digitaloceanspaces.com/tracy-import-miniprofiler-linux-x86_64) +[mac aarch64](https://zed-tracy-import-miniprofiler.nyc3.digitaloceanspaces.com/tracy-import-miniprofiler-macos-aarch64) + +### Alternative: Building it yourself + +- Clone the repo at git@github.com:zed-industries/tracy.git on the v0.12.2 branch +- `cd import && mkdir build && cd build` +- Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..` +- Build the importer: `ninja` +- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof /path/to/output.tracy` +- Open the trace in tracy: + - If you're on Windows, download the v0.12.2 version from the releases on the upstream repo + - If you're on other platforms, open it on the website: https://tracy.nereid.pl/ (the version might mismatch, so your luck may vary; ideally we should host our own) + +## To Save a Trace + +- Run the action: `zed open performance profiler` +- Hit the save button. This opens a save dialog; if that fails to open, the trace gets saved in your working directory. +- Convert the profile so it can be imported into tracy using the importer: `./tracy-import-miniprofiler output.tracy` +- Hit the 'power button' in the top left, then open the saved trace.
+- Now zoom in to see the tasks and how long they took + +# Warn if function is slow + +```rust +let _timer = zlog::time!("my_function_name").warn_if_gt(std::time::Duration::from_millis(100)); +``` diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 057a3d2e0814e083a3ecbbeafd987762cd825388..c25d160a17549f6338f25741afd68391cf88d769 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -174,14 +174,38 @@ When opening a remote project there are three relevant settings locations: Both the local Zed and the server Zed read the project settings, but they are not aware of the other's main `settings.json`. -Depending on the kind of setting you want to make, which settings file you should use: +Which settings file you should use depends on the kind of setting you want to make: - Project settings should be used for things that affect the project: indentation settings, which formatter / language server to use, etc. -- Server settings should be used for things that affect the server: paths to language servers, etc. +- Server settings should be used for things that affect the server: paths to language servers, proxy settings, etc. - Local settings should be used for things that affect the UI: font size, etc. In addition any extensions you have installed locally will be propagated to the remote server. This means that language servers, etc. will run correctly. +## Proxy Configuration + +The remote server will not use your local machine's proxy configuration because they may be under different network policies. If your remote server requires a proxy to access the internet, you must configure it on the remote server itself. + +In most cases, your remote server will already have proxy environment variables configured. Zed will automatically use them when downloading language servers, communicating with LLM models, etc. + +If needed, you can set these environment variables in the server's shell configuration (e.g., `~/.bashrc`): + +```bash +export http_proxy="http://proxy.example.com:8080" +export https_proxy="http://proxy.example.com:8080" +export no_proxy="localhost,127.0.0.1" +``` + +Alternatively, you can configure the proxy in the remote machine's `~/.config/zed/settings.json` (Linux) or `~/.zed/settings.json` (macOS): + +```json +{ + "proxy": "http://proxy.example.com:8080" +} +``` + +See the [proxy documentation](./configuring-zed.md#network-proxy) for supported proxy types and additional configuration options. + ## Initializing the remote server Once you provide the SSH options, Zed shells out to `ssh` on your local machine to create a ControlMaster connection with the options you provide. @@ -206,7 +230,7 @@ If you are struggling with connection issues, you should be able to see more inf ## Supported SSH Options -Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. We create one SSH control master per project, and use then use that to multiplex SSH connections for the Zed protocol itself, any terminals you open and tasks you run. We read settings from your SSH config file, but if you want to specify additional options to the SSH control master you can configure Zed to set them. +Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. We create one SSH control master per project, and then use that to multiplex SSH connections for the Zed protocol itself, any terminals you open and tasks you run. 
We read settings from your SSH config file, but if you want to specify additional options to the SSH control master you can configure Zed to set them. When typing in the "Connect New Server" dialog, you can use bash-style quoting to pass options containing a space. Once you have created a server it will be added to the `"ssh_connections": []` array in your settings file. You can edit the settings file directly to make changes to SSH connections. diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 29ecd9bc850b919dbc63a87e2f1bf9477901a33d..e84210d0fadef1598776b1ec51a3f19cdb2ac0c0 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -35,7 +35,7 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead ## Known Limitations -- Only the first prefix is used when an list of prefixes is passed in. +- Only the first prefix is used when a list of prefixes is passed in. - Currently only the `json` snippet file format is supported, even though the `simple-completion-language-server` supports both `json` and `toml` file formats. ## See also diff --git a/docs/src/tab-switcher.md b/docs/src/tab-switcher.md new file mode 100644 index 0000000000000000000000000000000000000000..5cc72be449c94c38fbe4814893595289cb499b5a --- /dev/null +++ b/docs/src/tab-switcher.md @@ -0,0 +1,46 @@ +# Tab Switcher + +The Tab Switcher provides a quick way to navigate between open tabs in Zed. It +displays a list of your open tabs sorted by recent usage, making it easy to jump +back to whatever you were just working on. + +![Tab Switcher with multiple panes](https://zed.dev/img/features/tab-switcher.png) + +## Quick Switching + +When the Tab Switcher is opened using {#kb tab_switcher::Toggle}, instead of +running the {#action tab_switcher::Toggle} from the command palette, it'll stay +active as long as the ctrl key is held down. + +While holding down ctrl, each subsequent tab press cycles to the next item (shift to cycle backwards) and, when ctrl is released, the selected item is confirmed and +the switcher is closed. + +## Opening the Tab Switcher + +The Tab Switcher can also be opened with either {#action tab_switcher::Toggle} +or {#action tab_switcher::ToggleAll}. Using {#kb tab_switcher::Toggle} will show +only the tabs for the current pane, while {#kb tab_switcher::ToggleAll} shows +all tabs for all panes. + +While the Tab Switcher is open, you can: + +- Press {#kb menu::SelectNext} to move to the next tab in the list +- Press {#kb menu::SelectPrevious} to move to the previous tab +- Press enter to confirm the selected tab and close the switcher +- Press escape to close the switcher and return to the original tab from which + the switcher was opened +- Press {#kb tab_switcher::CloseSelectedItem} to close the currently selected tab + +As you navigate through the list, Zed will update the pane's active item to +match the selected tab. 
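+
+If the defaults don't fit your workflow, these actions can be rebound in your keymap. A minimal sketch, assuming you want a dedicated shortcut for {#action tab_switcher::ToggleAll} (the `ctrl-alt-tab` binding here is only an example, not a default):
+
+```json
+[
+  {
+    "context": "Workspace",
+    "bindings": {
+      // Show tabs from every pane, not just the current one
+      "ctrl-alt-tab": "tab_switcher::ToggleAll"
+    }
+  }
+]
+```
+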
+ +## Action Reference + +| Action | Description | +| ----------------------------------------- | ------------------------------------------------- | +| {#action tab_switcher::Toggle} | Open the Tab Switcher for the current pane | +| {#action tab_switcher::ToggleAll} | Open the Tab Switcher showing tabs from all panes | +| {#action tab_switcher::CloseSelectedItem} | Close the selected tab in the Tab Switcher | diff --git a/docs/src/themes.md b/docs/src/themes.md index 0bbea57ebfd7c9d55031c2ca9ff31b67b360bcdd..615cd2c7b38a734af071ef373b75350231f4a5fb 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -51,7 +51,15 @@ For example, add the following to your `settings.json` if you wish to override t "comment.doc": { "font_style": "italic" } - } + }, + "accents": [ + "#ff0000", + "#ff7f00", + "#ffff00", + "#00ff00", + "#0000ff", + "#8b00ff" + ] } } } diff --git a/docs/src/toolchains.md b/docs/src/toolchains.md index 68e7baa8cf225d85862eadb0ef02674f84b59fd2..f9f5f3fe0e8164b0580786795df0b286a2a7760a 100644 --- a/docs/src/toolchains.md +++ b/docs/src/toolchains.md @@ -8,7 +8,7 @@ With toolchain selector, you don't need to spend time configuring your language You can even select different toolchains for different subprojects within your Zed project. A definition of a subproject is language-specific. In collaborative scenarios, only the project owner can see and modify an active toolchain. -In [remote projects](./remote-development.md), you can use the toolchain selector to control the active toolchain on the SSH host. When [sharing your project](./collaboration.md), the toolchain selector is not available to guests. +In [remote projects](./remote-development.md), you can use the toolchain selector to control the active toolchain on the SSH host. When [sharing your project](./collaboration/overview.md), the toolchain selector is not available to guests. ## Why do we need toolchains? diff --git a/docs/src/uninstall.md b/docs/src/uninstall.md index f2d7da93e78b71c607e79b0bdd5d017f88d55f4d..c1f71a6609ff1b73e15171802441f9aebc8f09cb 100644 --- a/docs/src/uninstall.md +++ b/docs/src/uninstall.md @@ -110,4 +110,4 @@ If you encounter issues during uninstallation: - **Linux**: If the uninstall script fails, check the error message and consider manual removal of the directories listed above. - **All platforms**: If you want to start fresh while keeping Zed installed, you can delete the configuration directories instead of uninstalling the application entirely. -For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community). +For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community-links). diff --git a/docs/src/vim.md b/docs/src/vim.md index c9a0cd09f2dafb9f07a26ef07b71205f5ddbdf15..09baa9b54f7e1aeb5f16777f4292131315d18928 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -471,7 +471,7 @@ But you cannot use the same shortcuts to move between all the editor docks (the } ``` -Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. +Subword motion, which allows you to navigate and select individual words in `camelCase` or `snake_case`, is not enabled by default. To enable it, add these bindings to your keymap. 
```json [settings] { @@ -485,6 +485,9 @@ Subword motion, which allows you to navigate and select individual words in came } ``` +> Note: Operations like `dw` remain unaffected. If you would like operations to +> also use subword motion, remove `vim_mode != operator` from the `context`. + Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. ```json [settings] @@ -566,7 +569,8 @@ You can change the following settings to modify vim mode's behavior: | use_system_clipboard | Determines how system clipboard is used:
  • "always": use for all operations
  • "never": only use when explicitly specified
  • "on_yank": use for yank operations
| "always" | | use_multiline_find | deprecated | | use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | -| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | +| toggle_relative_line_numbers | deprecated | false | +| relative_line_numbers | If "enabled", line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | "disabled" | | custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | | highlight_on_yank_duration | The duration of the highlight animation(in ms). Set to `0` to disable | 200 | @@ -590,7 +594,7 @@ Here's an example of these settings changed: "default_mode": "insert", "use_system_clipboard": "never", "use_smartcase_find": true, - "toggle_relative_line_numbers": true, + "relative_line_numbers": "enabled", "highlight_on_yank_duration": 50, "custom_digraphs": { "fz": "🧟‍♀️" diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 98b07797a2f7904acd10fe54b04ab39fe0854667..234776b1d3223a4b8634b42df1973a27c736616c 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -118,6 +118,7 @@ To disable this behavior use: "show_project_items": true, // Show/hide project host and name "show_onboarding_banner": true, // Show/hide onboarding banners "show_user_picture": true, // Show/hide user avatar + "show_user_menu": true, // Show/hide app user button "show_sign_in": true, // Show/hide sign-in button "show_menus": false // Show/hide menus }, @@ -374,6 +375,8 @@ TBD: Centered layout related settings "lsp_document_colors": "inlay", // none, inlay, border, background // When to show the scrollbar in the completion menu. "completion_menu_scrollbar": "never", // auto, system, always, never + // Turn on colorization of brackets in editors (configurable per language) + "colorize_brackets": true, ``` ### Edit Predictions {#editor-ai} @@ -457,6 +460,8 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k // When to show indent guides in the project panel. (always, never) "show": "always" }, + // Sort order for entries (directories_first, mixed, files_first) + "sort_mode": "directories_first", // Whether to hide the root entry when only one folder is open in the window. "hide_root": false, // Whether to hide the hidden entries in the project panel. diff --git a/docs/src/windows.md b/docs/src/windows.md index 34a553dd5b032915ed52651f7f02b737995b959b..b7b4b6b7bf153a2cae7cbf2b7168d502cfbdaeb0 100644 --- a/docs/src/windows.md +++ b/docs/src/windows.md @@ -6,6 +6,14 @@ Get the latest stable builds via [the download page](https://zed.dev/download). You can also build zed from source, see [these docs](https://zed.dev/docs/development/windows) for instructions. +### Package managers + +Additionally, you can install Zed using winget: + +```sh +winget install -e --id ZedIndustries.Zed +``` + ## Uninstall - Installed via installer: Use `Settings` → `Apps` → `Installed apps`, search for Zed, and click Uninstall. 
diff --git a/docs/src/worktree-trust.md b/docs/src/worktree-trust.md new file mode 100644 index 0000000000000000000000000000000000000000..590f063a75ac5d77e60d50f03af4795d6ec2961f --- /dev/null +++ b/docs/src/worktree-trust.md @@ -0,0 +1,58 @@ +# Zed and trusted worktrees + +A worktree in Zed is either a directory or a single file that Zed opens as a standalone "project". +Zed opens a worktree every time `zed some/path` is invoked, when a file or directory is dragged and dropped into Zed, when the user settings.json is opened, etc. + +Every worktree opened may contain a `.zed/settings.json` file with extra configuration options that may require installing and spawning language servers or MCP servers. +In order to provide users the opportunity to make their own choices according to their unique threat model and risk tolerance, all worktrees will be started in Restricted Mode, which prevents the download and execution of any related items from `.zed/settings.json`. Until configured to trust the worktree(s), Zed will not perform any related untrusted actions and will wait for user confirmation. This gives users a chance to review and understand any pre-configured settings, MCP servers, or language servers associated with a project. + +Note that at this point, Zed trusts the tools it installs itself; hence global entities such as global MCP servers and language servers like prettier and copilot are still installed and started as usual, independent of worktree trust. + +If a worktree is not trusted, Zed will indicate this with an exclamation mark icon in the title bar. Clicking this icon or using the `workspace::ToggleWorktreeSecurity` action will bring up the security modal that allows the user to trust the worktree. + +Trusting any worktree will persist this information between restarts. It's possible to clear all trusted worktrees with the `workspace::ClearTrustedWorktrees` command. +This command will restart Zed to ensure no untrusted settings, language servers, or MCP servers persist. + +This feature works locally and on SSH and WSL remote hosts. Zed tracks trust information per host in these cases. + +## What is restricted + +Restricted Mode prevents: + +- Project settings (`.zed/settings.json`) from being parsed and applied +- Language servers from being installed and spawned +- MCP servers from being installed and spawned + +## Configuring broad worktree trust + +By default, Zed won't trust any new worktrees, and users will be required to trust each new worktree. Though not recommended, users may elect to trust all worktrees by configuring the following setting: + +```json [settings] +"session": { + "trust_all_worktrees": true +} +``` + +Note that auto-trusted worktrees are not persisted between restarts; only manually trusted worktrees are. This ensures that new trust decisions must be made if a user elects to disable the `trust_all_worktrees` setting. + +## Trust hierarchy + +These are mostly internal details and may change in the future, but they are helpful for understanding how multiple different trust requests can be approved at once. +Zed has multiple layers of trust, based on the requests, from the least to the most trusted level: + +- "single file worktree" + +After opening an empty Zed window it's possible to open just a file, just as, after opening a directory in Zed, it's possible to open a file outside of that directory.
+A typical scenario where a directory might be open and a single file is subsequently opened is opening Zed's settings.json file via `zed: open settings file` command: that starts a language server for a new file open, which originates from a newly created, single file worktree. + +Spawning a language server presents a risk should the language server experience a supply-chain attack; therefore, Zed restricts that by default. Each single file worktree requires a separate trust grant, unless the directory containing it is trusted or all worktrees are trusted. + +- "directory worktree" + +If a directory is open in Zed, it's a full worktree which may spawn multiple language servers associated with it or spawn MCP servers if contained in a project settings file.Therefore, each directory worktree requires a separate trust grant unless a parent directory worktree trust is granted (see below). + +When a directory worktree is trusted, language and MCP servers are permitted to be downloaded and started, hence we also enable single file worktree trust for the host in question automatically when this occurs: this helps when opening single files when using language server features in the trusted directory worktree. + +- "parent directory worktree" + +To permit trust decisions for multiple directory worktrees at once, it's possible to trust all subdirectories of a given parent directory worktree opened in Zed by checking the appropriate checkbox. This will grant trust to all its subdirectories, including all current and potential directory worktrees. diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index 9f2afad54388bf2289e57f43275cdf2b4d98d4dd..ff0ba711204b34c9f937e944154ec89d0f877bb7 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -13,160 +13,67 @@ a > .hljs { color: var(--links); } -/* - body-container is necessary because mobile browsers don't seem to like - overflow-x on the body tag when there is a tag. -*/ -#body-container { - /* - This is used when the sidebar pushes the body content off the side of - the screen on small screens. Without it, dragging on mobile Safari - will want to reposition the viewport in a weird way. 
- */ - overflow-x: clip; -} - -.large-logo-img { - display: block; -} - .icon-logo-img { - display: none; + display: block; } -/* Menu Bar */ - -#menu-bar, -#menu-bar-hover-placeholder { - z-index: 101; - margin: auto calc(0px - var(--page-padding)); -} -#menu-bar { - padding: 12px 16px; - position: relative; - display: flex; - flex-wrap: wrap; - background-color: var(--bg); - border-block-end-color: var(--bg); - border-block-end-width: 1px; - border-block-end-style: solid; -} -#menu-bar.sticky, -.js #menu-bar-hover-placeholder:hover + #menu-bar, -.js #menu-bar:hover, -.js.sidebar-visible #menu-bar { - position: -webkit-sticky; - position: sticky; - top: 0 !important; -} -#menu-bar-hover-placeholder { - position: sticky; - position: -webkit-sticky; - top: 0; - height: var(--menu-bar-height); -} -#menu-bar.bordered { - border-block-end-color: var(--divider); -} -#menu-bar i, -#menu-bar .icon-button { +.icon-button { position: relative; - height: 3rem; - width: 3rem; + height: 28px; + width: 28px; z-index: 10; display: flex; align-items: center; justify-content: center; cursor: pointer; transition: color 0.5s; + border: 0; + background-color: transparent; + border-radius: 4px; + color: var(--icons); } -#menu-bar .icon-button:hover { - background-color: var(--icon-btn-bg-hover); -} - -@media only screen and (max-width: 420px) { - .large-logo-img { - display: none; - } - .icon-logo-img { - display: block; - } - - #menu-bar { - padding: 12px; - } - - #menu-bar .ib-hidden-mobile { - display: none; - } - - .right-buttons { - width: 100px; /*For center aligning the icon link*/ - } +.icon-button:hover { + color: var(--icons-hover); + background-color: var(--icon-btn-bg-hover); } -.icon-button { - border: none; - background: none; - padding: 0; - color: inherit; -} -.icon-button i { - margin: 0; +.ib-hidden-desktop { + display: none; } -.right-buttons { +.header-bar { + position: sticky; + top: 0; + z-index: 100; + padding: 12px 24px; + background-color: var(--sidebar-bg); + border-bottom: 1px solid var(--divider); display: flex; align-items: center; - justify-content: end; -} - -.right-buttons a { - text-decoration: none; + justify-content: space-between; + flex-shrink: 0; } -.left-buttons { +.header-bar .left-container { + width: 160px; display: flex; align-items: center; - gap: 0.5rem; -} -.no-js .left-buttons button { - display: none; + gap: 8px; } -.menu-title { - display: inline-flex; - justify-content: center; +.header-bar .right-container { + width: 160px; + display: flex; align-items: center; - flex: 1; - overflow: hidden; - filter: var(--logo-brightness); -} -.js .menu-title { - cursor: pointer; -} - -.menu-bar, -.menu-bar:visited, -.nav-chapters, -.nav-chapters:visited, -.mobile-nav-chapters, -.mobile-nav-chapters:visited, -.menu-bar .icon-button, -.menu-bar a i { - color: var(--icons); + gap: 4px; } -.menu-bar i:hover, -.menu-bar .icon-button:hover, -.nav-chapters:hover, -.mobile-nav-chapters i:hover { - color: var(--icons-hover); +.logo-nav { + display: block; + filter: var(--logo-brightness); } -/* Nav Icons */ - .nav-chapters { font-size: 2.5em; text-align: center; @@ -353,7 +260,7 @@ pre > .buttons button { border-width: 1px; border-radius: 4px; border-color: var(--border); - background-color: var(--theme-popup-bg); + background-color: var(--popover-bg); transition: 100ms; transition-property: color, border-color, background-color; color: var(--icons); @@ -444,6 +351,8 @@ mark.fade-out { #searchbar:focus, #searchbar.active { box-shadow: 0 0 3px var(--searchbar-shadow-color); + 
outline: none; + border-color: var(--search-mark-bg); } .searchresults-header { @@ -456,16 +365,9 @@ mark.fade-out { color: var(--searchresults-header-fg); } -.searchresults-outer { - margin-inline-start: auto; - margin-inline-end: auto; - max-width: var(--content-max-width); - border-block-end: 1px dashed var(--searchresults-border-color); -} - ul#searchresults { list-style: none; - padding-inline-start: 20px; + padding-inline-start: 0; } ul#searchresults li { margin: 10px 0px; @@ -477,35 +379,36 @@ ul#searchresults li.focus { } ul#searchresults span.teaser { display: block; - clear: both; - margin-block-start: 5px; - margin-block-end: 0; - margin-inline-start: 20px; - margin-inline-end: 0; font-size: 0.8em; + margin-block-start: 5px; + margin-inline-start: 4px; + padding-inline-start: 2ch; + border-left: 1px solid var(--divider); } ul#searchresults span.teaser em { font-weight: bold; - font-style: normal; + color: var(--full-contrast); + background: var(--code-bg); } /* Sidebar */ .sidebar { - position: fixed; - left: 0; - top: 0; - bottom: 0; + position: relative; width: var(--sidebar-width); + flex-shrink: 0; + display: flex; + flex-direction: column; font-size: 0.875em; box-sizing: border-box; -webkit-overflow-scrolling: touch; - overscroll-behavior-y: contain; + overscroll-behavior-y: none; + overflow: hidden; background-color: var(--sidebar-bg); color: var(--sidebar-fg); - border-right: 1px solid; - border-color: var(--divider); + border-right: 1px solid var(--divider); } + [dir="rtl"] .sidebar { left: unset; right: 0; @@ -524,14 +427,11 @@ ul#searchresults span.teaser em { line-height: 2em; } .sidebar .sidebar-scrollbox { + flex: 1; overflow-y: auto; - position: absolute; - top: 0; - bottom: 0; - left: 0; - right: 0; - padding: 12px 12px 12px 24px; + min-height: 0; } + .sidebar .sidebar-resize-handle { position: absolute; cursor: col-resize; @@ -561,18 +461,6 @@ ul#searchresults span.teaser em { var(--sidebar-resize-indicator-space) ); } -/* sidebar-hidden */ -#sidebar-toggle-anchor:not(:checked) ~ .sidebar { - transform: translateX( - calc(0px - var(--sidebar-width) - var(--sidebar-resize-indicator-width)) - ); - z-index: -1; -} -[dir="rtl"] #sidebar-toggle-anchor:not(:checked) ~ .sidebar { - transform: translateX( - calc(var(--sidebar-width) + var(--sidebar-resize-indicator-width)) - ); -} .sidebar::-webkit-scrollbar { background: var(--sidebar-bg); } @@ -580,30 +468,33 @@ ul#searchresults span.teaser em { background: var(--scrollbar); } -/* sidebar-visible */ -#sidebar-toggle-anchor:checked ~ .page-wrapper { - transform: translateX( - calc(var(--sidebar-width) + var(--sidebar-resize-indicator-width)) - ); -} -[dir="rtl"] #sidebar-toggle-anchor:checked ~ .page-wrapper { - transform: translateX( - calc(0px - var(--sidebar-width) - var(--sidebar-resize-indicator-width)) - ); -} -@media only screen and (min-width: 620px) { - #sidebar-toggle-anchor:checked ~ .page-wrapper { - transform: none; - margin-inline-start: var(--sidebar-width); +@media only screen and (max-width: 780px) { + .sidebar { + position: fixed; + top: 0; + left: 0; + height: 100vh; + padding-top: 57px; /* Account for header height */ + transform: translateX(-100%); + z-index: 99; + transition: transform 0.1s ease; + } + + [dir="rtl"] .sidebar { + left: unset; + right: 0; + transform: translateX(100%); } - [dir="rtl"] #sidebar-toggle-anchor:checked ~ .page-wrapper { - transform: none; + + body.sidebar-open .sidebar { + box-shadow: var(--sidebar-mobile-shadow); + transform: translateX(0); } } .chapter { list-style: 
none outside none; - padding-inline-start: 0; + padding: 8px 20px 20px 20px; line-height: 2.2em; margin: 0; } @@ -616,9 +507,10 @@ ul#searchresults span.teaser em { display: flex; color: var(--sidebar-non-existant); } + .chapter li a { display: block; - padding: 0; + padding: 0 4px; text-decoration: none; color: var(--sidebar-fg); } @@ -634,74 +526,102 @@ ul#searchresults span.teaser em { .chapter li > a.toggle { cursor: pointer; - display: block; + display: flex; + align-items: center; + justify-content: center; margin-inline-start: auto; - padding: 0 10px; user-select: none; - opacity: 0.68; -} - -.chapter li > a.toggle div { - transition: transform 0.5s; + opacity: 0.5; + border-radius: 4px; + transition: + opacity 0.15s ease, + background-color 0.15s ease; } -/* collapse the section */ -.chapter li:not(.expanded) + li > ol { - display: none; +.chapter li > a.toggle:hover { + opacity: 1; + background-color: var(--theme-hover); } .chapter li.chapter-item { + display: flex; + flex-wrap: wrap; + align-items: center; line-height: 1.5em; margin-block-start: 0.6em; } +.chapter li.chapter-item > a:first-child { + flex: 1; + min-width: 0; +} + .chapter li.expanded > a.toggle div { transform: rotate(90deg); } -.spacer { - width: 100%; - height: 3px; - margin: 5px 0px; +.chapter li.part-title { + font-size: 1.4rem; + padding: 0 8px 0 4px; + color: var(--title-color); + cursor: pointer; + user-select: none; + display: flex; + align-items: center; + justify-content: space-between; + line-height: auto; + border-radius: 2px; } -.chapter .spacer { - background-color: var(--divider); + +.chapter li.part-title.collapsible:hover { + background-color: var(--hover-section-title); } -@media (-moz-touch-enabled: 1), (pointer: coarse) { - .chapter li a { - padding: 5px 0; - } - .spacer { - margin: 10px 0; - } +.chapter li.part-title.collapsible::after { + content: "❯"; + display: inline-block; + font-size: 1.2rem; + opacity: 0.6; + transition: transform 0.2s ease; + flex-shrink: 0; +} + +.chapter li.part-title.collapsible.expanded::after { + transform: rotate(90deg); +} + +.chapter li.section-spacer { + height: 2rem; + list-style: none; +} + +.chapter li.section-hidden { + display: none !important; } .section { list-style: none outside none; - padding-inline-start: 20px; + padding-inline-start: 3ch; line-height: 1.9em; } -/* Theme Menu Popup */ - .theme-popup { position: absolute; - left: 32px; - top: calc(var(--menu-bar-height) - 12px); + right: 155px; + top: calc(var(--menu-bar-height) - 18px); z-index: 1000; border-radius: 4px; font-size: 1.4rem; color: var(--fg); - background: var(--theme-popup-bg); - border: 1px solid var(--theme-popup-border); + background: var(--popover-bg); + border: 1px solid var(--popover-border); margin: 0; padding: 0; list-style: none; display: none; - /* Don't let the children's background extend past the rounded corners. 
*/ overflow: hidden; } + [dir="rtl"] .theme-popup { left: unset; right: 10px; @@ -737,6 +657,8 @@ ul#searchresults span.teaser em { } .download-button { + max-height: 28px; + margin-left: 8px; background: var(--download-btn-bg); color: var(--download-btn-color); padding: 4px 8px; @@ -745,6 +667,7 @@ ul#searchresults span.teaser em { font-size: 1.4rem; border-radius: 4px; box-shadow: var(--download-btn-shadow) 0px -2px 0px 0px inset; + text-decoration: none; transition: 100ms; transition-property: box-shadow, border-color, background-color; } @@ -754,3 +677,135 @@ ul#searchresults span.teaser em { border-color: var(--download-btn-border-hover); box-shadow: none; } + +.search-button { + min-width: 100px; + max-width: 300px; + height: 28px; + width: 100%; + padding: 4px 4px 4px 8px; + display: flex; + gap: 8px; + background: var(--search-btn-bg); + border: 1px solid; + border-color: var(--search-btn-border); + font-size: 1.4rem; + font-family: var(--font); + color: var(--icons); + border-radius: 4px; + transition: 100ms; + transition-property: box-shadow, border-color, background-color; +} + +.search-button:hover { + background: var(--search-btn-bg-hover); +} + +.search-button .icon { + width: 12px; + height: 12px; + transform: translateY(10%); + scale: 0.9; +} + +.search-content-desktop { + width: 100%; + display: flex; + justify-content: space-between; +} + +.search-content-mobile { + display: none; +} + +.search-container { + box-sizing: border-box; + position: fixed; + inset: 0; + z-index: 1000; + padding: 24px; + padding-top: 72px; + background-color: rgba(0, 0, 0, 0.5); + display: none; + justify-content: center; +} + +.search-container:has(#search-wrapper:not(.hidden)) { + display: flex; +} + +.search-modal { + box-sizing: border-box; + + max-width: 600px; + min-width: 600px; + height: fit-content; + max-height: 600px; + display: flex; + flex-direction: column; + padding: 16px; + overflow-y: auto; + + border-radius: 8px; + background: var(--popover-bg); + border: 1px solid var(--popover-border); + box-shadow: var(--popover-shadow); +} + +.searchbar-outer { + width: 100%; +} + +#searchbar { + margin: 0; +} + +@media only screen and (max-width: 780px) { + .header-bar { + padding: 16px; + justify-content: start; + } + + .download-button { + display: none; + } + + .ib-hidden-mobile { + display: none; + } + + .header-bar .left-container { + width: fit-content; + } + + .header-bar .right-container { + width: fit-content; + } + + .search-button { + width: 100px; + margin-left: auto; + margin-right: 8px; + } + + .ib-hidden-desktop { + display: block; + } + + .search-modal { + width: 90vw; + min-width: auto; + } + + .search-content-desktop { + display: none; + } + + .search-content-mobile { + display: flex; + } + + .theme-popup { + right: 15px; + } +} diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index df20d35b53ebb173140e63ebfa0175236236ee14..9d4791ea40125b520ed8d73f7e7dc942a58cce32 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -23,7 +23,16 @@ html { body { margin: 0; font-size: 1.6rem; - overflow-x: hidden; + overflow: hidden; + height: 100vh; + overscroll-behavior-y: none; +} + +#body-container { + display: flex; + flex-direction: column; + height: 100vh; + overflow: hidden; } code { @@ -37,6 +46,17 @@ main { overflow-wrap: break-word; } +.noise-pattern { + pointer-events: none; + user-select: none; + z-index: 105; + position: absolute; + inset: 0; + background-size: 180px; + background-repeat: repeat; + opacity: var(--noise-opacity); +} + 
/* make wide tables scroll if they overflow */ .table-wrapper { overflow-x: auto; @@ -48,6 +68,7 @@ h3, h4, h5, h6 { + position: relative; font-family: var(--title-font); font-weight: 480; color: var(--title-color); @@ -91,6 +112,9 @@ h2 { h3 { font-size: 2rem; + padding-bottom: 0.8rem; + border-bottom: 1px dashed; + border-color: var(--border-light); } h4 { @@ -109,6 +133,14 @@ h5 { margin-block-end: 0; } +code:focus-visible, +pre:focus-visible, +li:focus-visible, +button:focus-visible, +a:focus-visible { + outline: 3px solid #094ece80; +} + .header + .header h3, .header + .header h4, .header + .header h5 { @@ -121,10 +153,9 @@ h3:target::before, h4:target::before, h5:target::before, h6:target::before { - display: inline-block; content: "»"; - margin-inline-start: -30px; - width: 30px; + position: absolute; + left: -1.5ch; } hr { @@ -144,17 +175,27 @@ hr { scroll-margin-top: calc(var(--menu-bar-height) + 2rem); } -.page { - outline: 0; - padding: 0 var(--page-padding); - margin-block-start: calc( - 0px - var(--menu-bar-height) - ); /* Compensate for the #menu-bar-hover-placeholder */ -} .page-wrapper { box-sizing: border-box; background-color: var(--bg); + display: flex; + flex: 1; + overflow: hidden; + min-height: 0; +} + +.page { + outline: 0; + flex: 1; + display: flex; + flex-direction: column; + overflow-x: hidden; + overflow-y: auto; + overscroll-behavior-y: none; + min-width: 0; + position: relative; } + .no-js .page-wrapper, .js:not(.sidebar-resizing) .page-wrapper { transition: @@ -168,18 +209,23 @@ hr { } .content { - overflow-y: auto; - padding: 48px 4px; + padding: 48px 32px 0 32px; + display: flex; + justify-content: space-between; + gap: 36px; } + .content main { margin-inline-start: auto; margin-inline-end: auto; max-width: var(--content-max-width); } + .content p { line-height: 1.625em; } .content div.video { + z-index: 150; margin-top: 1rem; border: 1px solid; border-color: var(--border); @@ -213,6 +259,8 @@ hr { } .content img, .content video { + position: relative; + z-index: 150; max-width: 100%; background-color: var(--media-bg); border: 1px solid; @@ -333,7 +381,7 @@ blockquote .warning:before { kbd { background-color: rgba(8, 76, 207, 0.1); border-radius: 4px; - border: solid 1px var(--theme-popup-border); + border: solid 1px var(--popover-border); box-shadow: inset 0 -1px 0 var(--theme-hover); display: inline-block; font-size: var(--code-font-size); @@ -378,15 +426,6 @@ kbd { visibility: visible; } -.chapter li.part-title { - font-size: 18px; - font-family: var(--title-font); - font-weight: 520; - color: var(--title-color); - margin: 5px 0; - margin-top: 2rem; -} - .result-no-output { font-style: italic; } @@ -395,3 +434,19 @@ code:not(pre code).hljs { color: var(--code-text) !important; background-color: var(--code-bg) !important; } + +@media only screen and (max-width: 1020px) { + .content { + padding: 16px 32px 0 32px; + } + + .content main { + width: 100%; + } +} + +@media only screen and (max-width: 400px) { + .content { + padding: 16px 16px 0 16px; + } +} diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index dceba25af87e62ee64459984950d3e6921421d39..285540c6dcd362478a7df84fa1e8c7ebd07c5a3e 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -5,11 +5,11 @@ --logo-brightness: brightness(1); - --sidebar-width: 300px; + --sidebar-width: 280px; --sidebar-resize-indicator-width: 0px; --sidebar-resize-indicator-space: 2px; --page-padding: 15px; - --content-max-width: 750px; + --content-max-width: 690px; 
--menu-bar-height: 64px; --font: "IA Writer Quattro S", sans-serif; --title-font: "Lora", "Helvetica Neue", Helvetica, Arial, sans-serif; @@ -19,6 +19,7 @@ --code-font-size: 0.875em /* please adjust the ace font size accordingly in editor.js */; + --noise-opacity: 0.024; --bg: hsla(50, 25%, 96%); --fg: hsl(220, 13%, 34%); --title-color: hsl(220, 92%, 42%); @@ -29,12 +30,14 @@ --media-bg: hsl(50, 25%, 92%); + --sidebar-bg: hsla(50, 25%, 94%); --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; --sidebar-active: hsl(220, 93%, 42%); --sidebar-active-bg: hsl(220, 93%, 42%, 0.1); + --sidebar-mobile-shadow: 0px 16px 16px hsl(0, 0%, 0%, 0.1); - --divider: hsl(220, 93%, 42%, 0.15); + --divider: hsl(220, 50%, 45%, 0.1); --scrollbar: #8f8f8f; --icons: #747474; @@ -56,9 +59,13 @@ --pre-border: hsla(220, 93%, 42%, 0.3); --pre-shadow: hsla(220, 93%, 42%, 0.07); - --theme-popup-bg: #fafafa; - --theme-popup-border: #cccccc; + --popover-bg: #fafafa; + --popover-border: #cccccc; + --popover-shadow: + 0 10px 15px -3px hsl(0, 0%, 0%, 0.1), 0 4px 6px -4px hsl(0, 0%, 0%, 0.1); + --theme-hover: #e6e6e6; + --hover-section-title: hsl(50, 25%, 88%); --quote-bg: hsl(197, 37%, 96%); --quote-border: hsl(197, 37%, 84%); @@ -71,12 +78,14 @@ --table-border-color: hsl(220, 93%, 42%, 0.15); --table-alternate-bg: hsl(220, 10%, 90%, 0.4); + --toc-link-underline: hsl(0, 0%, 0%, 0.1); + --toc-link-underline-hover: hsl(0, 0%, 0%, 0.5); + --searchbar-border-color: #aaa; --searchbar-bg: #fafafa; --searchbar-fg: #000; --searchbar-shadow-color: #aaa; --searchresults-header-fg: #666; - --searchresults-border-color: #888; --searchresults-li-bg: #e4f2fe; --search-mark-bg: #a2cff5; @@ -87,6 +96,10 @@ --download-btn-border-hover: hsla(220, 60%, 50%, 0.2); --download-btn-shadow: hsla(220, 40%, 60%, 0.1); + --search-btn-bg: hsl(220, 100%, 100%); + --search-btn-bg-hover: hsla(50, 25%, 97%); + --search-btn-border: hsl(220, 50%, 45%, 0.2); + --toast-bg: hsla(220, 93%, 98%); --toast-border: hsla(220, 93%, 42%, 0.3); --toast-border-success: hsla(120, 73%, 42%, 0.3); @@ -103,7 +116,8 @@ --logo-brightness: brightness(2); - --bg: hsl(220, 13%, 10%); + --noise-opacity: 0.012; + --bg: hsl(220, 13%, 7.5%); --fg: hsl(220, 14%, 70%); --title-color: hsl(220, 92%, 80%); @@ -113,13 +127,14 @@ --media-bg: hsl(220, 13%, 8%); - --sidebar-bg: hsl(220, 13%, 10%); + --sidebar-bg: hsl(220, 13%, 6.5%); --sidebar-fg: hsl(220, 14%, 71%); --sidebar-non-existant: #505254; --sidebar-active: hsl(220, 92%, 75%); --sidebar-active-bg: hsl(220, 93%, 42%, 0.25); + --sidebar-mobile-shadow: 0px 16px 16px hsl(0, 0%, 0%, 0.6); - --divider: hsl(220, 13%, 20%); + --divider: hsl(220, 13%, 12%); --scrollbar: hsl(220, 13%, 30%); --icons: hsl(220, 14%, 71%); @@ -140,9 +155,13 @@ --pre-border: hsla(220, 93%, 70%, 0.3); --pre-shadow: hsla(220, 93%, 70%, 0.1); - --theme-popup-bg: hsl(220, 13%, 15%); - --theme-popup-border: hsl(220, 13%, 20%); + --popover-bg: hsl(220, 13%, 8%); + --popover-border: hsl(220, 13%, 20%); + --popover-shadow: + 0 10px 15px -3px hsl(0, 0%, 0%, 0.1), 0 4px 6px -4px hsl(0, 0%, 0%, 0.1); + --theme-hover: hsl(220, 13%, 25%); + --hover-section-title: hsl(220, 13%, 11%); --quote-bg: hsl(220, 13%, 25%, 0.4); --quote-border: hsl(220, 13%, 32%, 0.5); @@ -151,6 +170,9 @@ --table-header-bg: hsl(220, 13%, 25%, 0.5); --table-alternate-bg: hsl(220, 13%, 20%, 0.4); + --toc-link-underline: hsl(255, 100%, 100%, 0.1); + --toc-link-underline-hover: hsl(255, 100%, 100%, 0.4); + --warning-border: hsl(25, 100%, 85%, 0.2); --warning-bg: hsl(42, 100%, 40%, 0.1); 
--warning-icon: hsl(42, 100%, 80%); @@ -160,7 +182,6 @@ --searchbar-fg: hsl(220, 14%, 71%); --searchbar-shadow-color: hsl(220, 13%, 15%); --searchresults-header-fg: hsl(220, 14%, 60%); - --searchresults-border-color: hsl(220, 13%, 30%); --searchresults-li-bg: hsl(220, 13%, 25%); --search-mark-bg: hsl(220, 93%, 60%); @@ -171,6 +192,10 @@ --download-btn-border-hover: hsla(220, 90%, 80%, 0.4); --download-btn-shadow: hsla(220, 50%, 60%, 0.15); + --search-btn-bg: hsl(220, 90%, 90%, 0.05); + --search-btn-bg-hover: hsl(220, 90%, 90%, 0.1); + --search-btn-border: hsla(220, 90%, 80%, 0.1); + --toast-bg: hsla(220, 20%, 98%, 0.05); --toast-border: hsla(220, 93%, 70%, 0.2); --toast-border-success: hsla(120, 90%, 60%, 0.3); diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 052d983483708699333c6ca308509d306cbc7566..98f64d41c3eb86dfb335ecf0964f434c50fad0bb 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -1,8 +1,31 @@ - + + + + {{ title }} {{#if is_print }} @@ -48,161 +71,227 @@ {{/if}} - +
[index.hbs hunk not recoverable: the HTML markup in this part of the diff was stripped during extraction; the surviving fragments only show the old {{> header}} partial and the {{#if search_enabled}} block being removed]
( + iterations: usize, + expected_pass_ratio: f32, + mut processor: P, + evalf: impl Fn() -> EvalOutput + Send + Sync + 'static, +) where + P: EvalOutputProcessor, +{ + let mut evaluated_count = 0; + let mut failed_count = 0; + let evalf = Arc::new(evalf); + report_progress(evaluated_count, failed_count, iterations); + + let (tx, rx) = mpsc::channel(); + + let executor = gpui::background_executor(); + let semaphore = Arc::new(smol::lock::Semaphore::new(32)); + let evalf = Arc::new(evalf); + // Warm the cache once + let first_output = evalf(); + tx.send(first_output).ok(); + + for _ in 1..iterations { + let tx = tx.clone(); + let semaphore = semaphore.clone(); + let evalf = evalf.clone(); + executor + .spawn(async move { + let _guard = semaphore.acquire().await; + let output = evalf(); + tx.send(output).ok(); + }) + .detach(); + } + drop(tx); + + let mut failed_evals = Vec::new(); + let mut errored_evals = HashMap::new(); + while let Ok(output) = rx.recv() { + processor.process(&output); + + match output.outcome { + OutcomeKind::Passed => {} + OutcomeKind::Failed => { + failed_count += 1; + failed_evals.push(output); + } + OutcomeKind::Error => { + failed_count += 1; + *errored_evals.entry(output.data).or_insert(0) += 1; + } + } + + evaluated_count += 1; + report_progress(evaluated_count, failed_count, iterations); + } + + let actual_pass_ratio = (iterations - failed_count) as f32 / iterations as f32; + println!("Actual pass ratio: {}\n", actual_pass_ratio); + if actual_pass_ratio < expected_pass_ratio { + for (error, count) in errored_evals { + println!("Eval errored {} times. Error: {}", count, error); + } + + for failed in failed_evals { + println!("Eval failed"); + println!("{}", failed.data); + } + + panic!( + "Actual pass ratio: {}\nExpected pass ratio: {}", + actual_pass_ratio, expected_pass_ratio + ); + } + + processor.assert(); +} diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index e9f1c71908b633362b349df451f8e9743269412a..307a3a19bd5ec6502270ae2f579cbd6b6f378746 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -25,7 +25,8 @@ language.workspace = true log.workspace = true lsp.workspace = true parking_lot.workspace = true -semantic_version.workspace = true +proto.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true task.workspace = true @@ -36,4 +37,8 @@ wasm-encoder.workspace = true wasmparser.workspace = true [dev-dependencies] +fs = { workspace = true, "features" = ["test-support"] } +gpui = { workspace = true, "features" = ["test-support"] } +indoc.workspace = true pretty_assertions.workspace = true +tempfile.workspace = true diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index bd2b37c337dcaca448e2175472ea46c126d2f9a3..88f2bea0c0c68480a2ad67f536ecf9d465a6a9ae 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -14,7 +14,7 @@ use async_trait::async_trait; use fs::normalize_path; use gpui::{App, Task}; use language::LanguageName; -use semantic_version::SemanticVersion; +use semver::Version; use task::{SpawnInTerminal, ZedDebugConfig}; use util::rel_path::RelPath; @@ -170,10 +170,7 @@ pub trait Extension: Send + Sync + 'static { ) -> Result; } -pub fn parse_wasm_extension_version( - extension_id: &str, - wasm_bytes: &[u8], -) -> Result { +pub fn parse_wasm_extension_version(extension_id: &str, wasm_bytes: &[u8]) -> Result { let mut version = None; for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) { @@ 
-200,9 +197,9 @@ pub fn parse_wasm_extension_version( version.with_context(|| format!("extension {extension_id} has no zed:api-version section")) } -fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option { +fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option { if data.len() == 6 { - Some(SemanticVersion::new( + Some(Version::new( u16::from_be_bytes([data[0], data[1]]) as _, u16::from_be_bytes([data[2], data[3]]) as _, u16::from_be_bytes([data[4], data[5]]) as _, diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 1385ec488a2de36f5894ab91ac0ae45881aa625f..8b9bf994d17e0594c719bed29907630fedf11497 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -2,8 +2,9 @@ use crate::{ ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, build_debug_adapter_schema_path, parse_wasm_extension_version, }; +use ::fs::Fs; use anyhow::{Context as _, Result, bail}; -use futures::AsyncReadExt; +use futures::{AsyncReadExt, StreamExt}; use heck::ToSnakeCase; use http_client::{self, AsyncBody, HttpClient}; use serde::Deserialize; @@ -77,8 +78,9 @@ impl ExtensionBuilder { extension_dir: &Path, extension_manifest: &mut ExtensionManifest, options: CompileExtensionOptions, + fs: Arc, ) -> Result<()> { - populate_defaults(extension_manifest, extension_dir)?; + populate_defaults(extension_manifest, extension_dir, fs).await?; if extension_dir.is_relative() { bail!( @@ -247,26 +249,34 @@ impl ExtensionBuilder { let parser_path = src_path.join("parser.c"); let scanner_path = src_path.join("scanner.c"); - log::info!("compiling {grammar_name} parser"); - let clang_output = util::command::new_smol_command(&clang_path) - .args(["-fPIC", "-shared", "-Os"]) - .arg(format!("-Wl,--export=tree_sitter_{grammar_name}")) - .arg("-o") - .arg(&grammar_wasm_path) - .arg("-I") - .arg(&src_path) - .arg(&parser_path) - .args(scanner_path.exists().then_some(scanner_path)) - .output() - .await - .context("failed to run clang")?; - - if !clang_output.status.success() { - bail!( - "failed to compile {} parser with clang: {}", - grammar_name, - String::from_utf8_lossy(&clang_output.stderr), + // Skip recompiling if the WASM object is already newer than the source files + if file_newer_than_deps(&grammar_wasm_path, &[&parser_path, &scanner_path]).unwrap_or(false) + { + log::info!( + "skipping compilation of {grammar_name} parser because the existing compiled grammar is up to date" ); + } else { + log::info!("compiling {grammar_name} parser"); + let clang_output = util::command::new_smol_command(&clang_path) + .args(["-fPIC", "-shared", "-Os"]) + .arg(format!("-Wl,--export=tree_sitter_{grammar_name}")) + .arg("-o") + .arg(&grammar_wasm_path) + .arg("-I") + .arg(&src_path) + .arg(&parser_path) + .args(scanner_path.exists().then_some(scanner_path)) + .output() + .await + .context("failed to run clang")?; + + if !clang_output.status.success() { + bail!( + "failed to compile {} parser with clang: {}", + grammar_name, + String::from_utf8_lossy(&clang_output.stderr), + ); + } } Ok(()) @@ -538,7 +548,11 @@ impl ExtensionBuilder { } } -fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> { +async fn populate_defaults( + manifest: &mut ExtensionManifest, + extension_path: &Path, + fs: Arc, +) -> Result<()> { // For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing // contents of the computed fields, since we don't care what the 
existing values are. if manifest.schema_version.is_v0() { @@ -553,12 +567,16 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> } let languages_dir = extension_path.join("languages"); - if languages_dir.exists() { - for entry in fs::read_dir(&languages_dir).context("failed to list languages dir")? { - let entry = entry?; - let language_dir = entry.path(); + if fs.is_dir(&languages_dir).await { + let mut language_dir_entries = fs + .read_dir(&languages_dir) + .await + .context("failed to list languages dir")?; + + while let Some(language_dir) = language_dir_entries.next().await { + let language_dir = language_dir?; let config_path = language_dir.join("config.toml"); - if config_path.exists() { + if fs.is_file(config_path.as_path()).await { let relative_language_dir = language_dir.strip_prefix(extension_path)?.to_path_buf(); if !manifest.languages.contains(&relative_language_dir) { @@ -569,10 +587,14 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> } let themes_dir = extension_path.join("themes"); - if themes_dir.exists() { - for entry in fs::read_dir(&themes_dir).context("failed to list themes dir")? { - let entry = entry?; - let theme_path = entry.path(); + if fs.is_dir(&themes_dir).await { + let mut theme_dir_entries = fs + .read_dir(&themes_dir) + .await + .context("failed to list themes dir")?; + + while let Some(theme_path) = theme_dir_entries.next().await { + let theme_path = theme_path?; if theme_path.extension() == Some("json".as_ref()) { let relative_theme_path = theme_path.strip_prefix(extension_path)?.to_path_buf(); if !manifest.themes.contains(&relative_theme_path) { @@ -583,10 +605,14 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> } let icon_themes_dir = extension_path.join("icon_themes"); - if icon_themes_dir.exists() { - for entry in fs::read_dir(&icon_themes_dir).context("failed to list icon themes dir")? { - let entry = entry?; - let icon_theme_path = entry.path(); + if fs.is_dir(&icon_themes_dir).await { + let mut icon_theme_dir_entries = fs + .read_dir(&icon_themes_dir) + .await + .context("failed to list icon themes dir")?; + + while let Some(icon_theme_path) = icon_theme_dir_entries.next().await { + let icon_theme_path = icon_theme_path?; if icon_theme_path.extension() == Some("json".as_ref()) { let relative_icon_theme_path = icon_theme_path.strip_prefix(extension_path)?.to_path_buf(); @@ -595,21 +621,26 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> } } } - } - - let snippets_json_path = extension_path.join("snippets.json"); - if snippets_json_path.exists() { - manifest.snippets = Some(snippets_json_path); + }; + if manifest.snippets.is_none() + && let snippets_json_path = extension_path.join("snippets.json") + && fs.is_file(&snippets_json_path).await + { + manifest.snippets = Some("snippets.json".into()); } // For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in // the manifest using the contents of the `grammars` directory. if manifest.schema_version.is_v0() { let grammars_dir = extension_path.join("grammars"); - if grammars_dir.exists() { - for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? 
{ - let entry = entry?; - let grammar_path = entry.path(); + if fs.is_dir(&grammars_dir).await { + let mut grammar_dir_entries = fs + .read_dir(&grammars_dir) + .await + .context("failed to list grammars dir")?; + + while let Some(grammar_path) = grammar_dir_entries.next().await { + let grammar_path = grammar_path?; if grammar_path.extension() == Some("toml".as_ref()) { #[derive(Deserialize)] struct GrammarConfigToml { @@ -619,7 +650,7 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> pub path: Option, } - let grammar_config = fs::read_to_string(&grammar_path)?; + let grammar_config = fs.load(&grammar_path).await?; let grammar_config: GrammarConfigToml = toml::from_str(&grammar_config)?; let grammar_name = grammar_path @@ -643,3 +674,153 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Ok(()) } + +/// Returns `true` if the target exists and its last modified time is greater than that +/// of each dependency which exists (i.e., dependency paths which do not exist are ignored). +/// +/// # Errors +/// +/// Returns `Err` if any of the underlying file I/O operations fail. +fn file_newer_than_deps(target: &Path, dependencies: &[&Path]) -> Result { + if !target.try_exists()? { + return Ok(false); + } + let target_modified = target.metadata()?.modified()?; + for dependency in dependencies { + if !dependency.try_exists()? { + continue; + } + let dep_modified = dependency.metadata()?.modified()?; + if target_modified < dep_modified { + return Ok(false); + } + } + Ok(true) +} + +#[cfg(test)] +mod tests { + use std::{ + path::{Path, PathBuf}, + str::FromStr, + thread::sleep, + time::Duration, + }; + + use gpui::TestAppContext; + use indoc::indoc; + + use crate::{ + ExtensionManifest, + extension_builder::{file_newer_than_deps, populate_defaults}, + }; + + #[test] + fn test_file_newer_than_deps() { + // Don't use TempTree because we need to guarantee the order + let tmpdir = tempfile::tempdir().unwrap(); + let target = tmpdir.path().join("target.wasm"); + let dep1 = tmpdir.path().join("parser.c"); + let dep2 = tmpdir.path().join("scanner.c"); + + assert!( + !file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(), + "target doesn't exist" + ); + std::fs::write(&target, "foo").unwrap(); // Create target + assert!( + file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(), + "dependencies don't exist; target is newer" + ); + sleep(Duration::from_secs(1)); + std::fs::write(&dep1, "foo").unwrap(); // Create dep1 (newer than target) + // Dependency is newer + assert!( + !file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(), + "a dependency is newer (target {:?}, dep1 {:?})", + target.metadata().unwrap().modified().unwrap(), + dep1.metadata().unwrap().modified().unwrap(), + ); + sleep(Duration::from_secs(1)); + std::fs::write(&dep2, "foo").unwrap(); // Create dep2 + sleep(Duration::from_secs(1)); + std::fs::write(&target, "foobar").unwrap(); // Update target + assert!( + file_newer_than_deps(&target, &[&dep1, &dep2]).unwrap(), + "target is newer than dependencies (target {:?}, dep2 {:?})", + target.metadata().unwrap().modified().unwrap(), + dep2.metadata().unwrap().modified().unwrap(), + ); + } + + #[gpui::test] + async fn test_snippet_location_is_kept(cx: &mut TestAppContext) { + let fs = fs::FakeFs::new(cx.executor()); + let extension_path = Path::new("/extension"); + + fs.insert_tree( + extension_path, + serde_json::json!({ + "extension.toml": indoc! 
{r#" + id = "test-manifest" + name = "Test Manifest" + version = "0.0.1" + schema_version = 1 + + snippets = "./snippets/snippets.json" + "# + }, + "snippets.json": "", + }), + ) + .await; + + let mut manifest = ExtensionManifest::load(fs.clone(), extension_path) + .await + .unwrap(); + + populate_defaults(&mut manifest, extension_path, fs.clone()) + .await + .unwrap(); + + assert_eq!( + manifest.snippets, + Some(PathBuf::from_str("./snippets/snippets.json").unwrap()) + ) + } + + #[gpui::test] + async fn test_automatic_snippet_location_is_relative(cx: &mut TestAppContext) { + let fs = fs::FakeFs::new(cx.executor()); + let extension_path = Path::new("/extension"); + + fs.insert_tree( + extension_path, + serde_json::json!({ + "extension.toml": indoc! {r#" + id = "test-manifest" + name = "Test Manifest" + version = "0.0.1" + schema_version = 1 + + "# + }, + "snippets.json": "", + }), + ) + .await; + + let mut manifest = ExtensionManifest::load(fs.clone(), extension_path) + .await + .unwrap(); + + populate_defaults(&mut manifest, extension_path, fs.clone()) + .await + .unwrap(); + + assert_eq!( + manifest.snippets, + Some(PathBuf::from_str("snippets.json").unwrap()) + ) + } +} diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 7e074ffcab77ceb2a63fd92448faa2e13f4ec8c4..4ecdd378ca86dbee263e439e13fa4776dab9e316 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -3,7 +3,7 @@ use collections::{BTreeMap, HashMap}; use fs::Fs; use language::LanguageName; use lsp::LanguageServerName; -use semantic_version::SemanticVersion; +use semver::Version; use serde::{Deserialize, Serialize}; use std::{ ffi::OsStr, @@ -137,7 +137,7 @@ pub fn build_debug_adapter_schema_path( #[derive(Clone, Default, PartialEq, Eq, Debug, Deserialize, Serialize)] pub struct LibManifestEntry { pub kind: Option, - pub version: Option, + pub version: Option, } #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] @@ -193,6 +193,36 @@ pub struct TargetConfig { /// If not provided and the URL is a GitHub release, we'll attempt to fetch it from GitHub. #[serde(default)] pub sha256: Option, + /// Environment variables to set when launching the agent server. + /// These target-specific env vars will override any env vars set at the agent level. 
+ #[serde(default)] + pub env: HashMap, +} + +impl TargetConfig { + pub fn from_proto(proto: proto::ExternalExtensionAgentTarget) -> Self { + Self { + archive: proto.archive, + cmd: proto.cmd, + args: proto.args, + sha256: proto.sha256, + env: proto.env.into_iter().collect(), + } + } + + pub fn to_proto(&self) -> proto::ExternalExtensionAgentTarget { + proto::ExternalExtensionAgentTarget { + archive: self.archive.clone(), + cmd: self.cmd.clone(), + args: self.args.clone(), + sha256: self.sha256.clone(), + env: self + .env + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect(), + } + } } #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] @@ -265,27 +295,26 @@ impl ExtensionManifest { .and_then(OsStr::to_str) .context("invalid extension name")?; - let mut extension_manifest_path = extension_dir.join("extension.json"); + let extension_manifest_path = extension_dir.join("extension.toml"); if fs.is_file(&extension_manifest_path).await { - let manifest_content = fs - .load(&extension_manifest_path) - .await - .with_context(|| format!("failed to load {extension_name} extension.json"))?; - let manifest_json = serde_json::from_str::(&manifest_content) - .with_context(|| { - format!("invalid extension.json for extension {extension_name}") - })?; - - Ok(manifest_from_old_manifest(manifest_json, extension_name)) - } else { - extension_manifest_path.set_extension("toml"); - let manifest_content = fs - .load(&extension_manifest_path) - .await - .with_context(|| format!("failed to load {extension_name} extension.toml"))?; + let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| { + format!("loading {extension_name} extension.toml, {extension_manifest_path:?}") + })?; toml::from_str(&manifest_content).map_err(|err| { anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}") }) + } else if let extension_manifest_path = extension_manifest_path.with_extension("json") + && fs.is_file(&extension_manifest_path).await + { + let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| { + format!("loading {extension_name} extension.json, {extension_manifest_path:?}") + })?; + + serde_json::from_str::(&manifest_content) + .with_context(|| format!("invalid extension.json for extension {extension_name}")) + .map(|manifest_json| manifest_from_old_manifest(manifest_json, extension_name)) + } else { + anyhow::bail!("No extension manifest found for extension {extension_name}") } } } diff --git a/crates/extension_api/Cargo.toml b/crates/extension_api/Cargo.toml index 318a0024bf4d9bae76af888b6668d7c21f37f804..829455e62912883bea85f429a1a8917e6360d0fb 100644 --- a/crates/extension_api/Cargo.toml +++ b/crates/extension_api/Cargo.toml @@ -1,12 +1,13 @@ [package] name = "zed_extension_api" -version = "0.7.0" +version = "0.8.0" description = "APIs for creating Zed extensions in Rust" repository = "https://github.com/zed-industries/zed" documentation = "https://docs.rs/zed_extension_api" keywords = ["zed", "extension"] edition.workspace = true -publish = true +# Change back to `true` when we're ready to publish v0.8.0. 
+publish = false license = "Apache-2.0" [lints] diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index 723e5442098f1a66b78b86fa7ed980a18944778b..9418623224289f795fed061acbfc6035a4cc5cdf 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -334,7 +334,7 @@ mod wit { wit_bindgen::generate!({ skip: ["init-extension"], - path: "./wit/since_v0.6.0", + path: "./wit/since_v0.8.0", }); } diff --git a/crates/extension_api/wit/since_v0.8.0/common.wit b/crates/extension_api/wit/since_v0.8.0/common.wit new file mode 100644 index 0000000000000000000000000000000000000000..139e7ba0ca4d1cc5ac78ccd23673ca749d6e46b2 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/common.wit @@ -0,0 +1,12 @@ +interface common { + /// A (half-open) range (`[start, end)`). + record range { + /// The start of the range (inclusive). + start: u32, + /// The end of the range (exclusive). + end: u32, + } + + /// A list of environment variables. + type env-vars = list>; +} diff --git a/crates/extension_api/wit/since_v0.8.0/context-server.wit b/crates/extension_api/wit/since_v0.8.0/context-server.wit new file mode 100644 index 0000000000000000000000000000000000000000..7234e0e6d0f6d444e92a056a92f6c90c7dc053b4 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/context-server.wit @@ -0,0 +1,11 @@ +interface context-server { + /// Configuration for context server setup and installation. + record context-server-configuration { + /// Installation instructions in Markdown format. + installation-instructions: string, + /// JSON schema for settings validation. + settings-schema: string, + /// Default settings template. + default-settings: string, + } +} diff --git a/crates/extension_api/wit/since_v0.8.0/dap.wit b/crates/extension_api/wit/since_v0.8.0/dap.wit new file mode 100644 index 0000000000000000000000000000000000000000..693befe02f9c313455facd4839572528c3408fd1 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/dap.wit @@ -0,0 +1,123 @@ +interface dap { + use common.{env-vars}; + + /// Resolves a specified TcpArgumentsTemplate into TcpArguments + resolve-tcp-template: func(template: tcp-arguments-template) -> result; + + record launch-request { + program: string, + cwd: option, + args: list, + envs: env-vars, + } + + record attach-request { + process-id: option, + } + + variant debug-request { + launch(launch-request), + attach(attach-request) + } + + record tcp-arguments { + port: u16, + host: u32, + timeout: option, + } + + record tcp-arguments-template { + port: option, + host: option, + timeout: option, + } + + /// Debug Config is the "highest-level" configuration for a debug session. + /// It comes from a new process modal UI; thus, it is essentially debug-adapter-agnostic. + /// It is expected of the extension to translate this generic configuration into something that can be debugged by the adapter (debug scenario). + record debug-config { + /// Name of the debug task + label: string, + /// The debug adapter to use + adapter: string, + request: debug-request, + stop-on-entry: option, + } + + record task-template { + /// Human readable name of the task to display in the UI. + label: string, + /// Executable command to spawn. + command: string, + args: list, + env: env-vars, + cwd: option, + } + + /// A task template with substituted task variables. + type resolved-task = task-template; + + /// A task template for building a debug target. 
+ type build-task-template = task-template; + + variant build-task-definition { + by-name(string), + template(build-task-definition-template-payload ) + } + record build-task-definition-template-payload { + locator-name: option, + template: build-task-template + } + + /// Debug Scenario is the user-facing configuration type (used in debug.json). It is still concerned with what to debug and not necessarily how to do it (except for any + /// debug-adapter-specific configuration options). + record debug-scenario { + /// Unsubstituted label for the task.DebugAdapterBinary + label: string, + /// Name of the Debug Adapter this configuration is intended for. + adapter: string, + /// An optional build step to be ran prior to starting a debug session. Build steps are used by Zed's locators to locate the executable to debug. + build: option, + /// JSON-encoded configuration for a given debug adapter. + config: string, + /// TCP connection parameters (if they were specified by user) + tcp-connection: option, + } + + enum start-debugging-request-arguments-request { + launch, + attach, + } + + record debug-task-definition { + /// Unsubstituted label for the task.DebugAdapterBinary + label: string, + /// Name of the Debug Adapter this configuration is intended for. + adapter: string, + /// JSON-encoded configuration for a given debug adapter. + config: string, + /// TCP connection parameters (if they were specified by user) + tcp-connection: option, + } + + record start-debugging-request-arguments { + /// JSON-encoded configuration for a given debug adapter. It is specific to each debug adapter. + /// `configuration` will have it's Zed variable references substituted prior to being passed to the debug adapter. + configuration: string, + request: start-debugging-request-arguments-request, + } + + /// The lowest-level representation of a debug session, which specifies: + /// - How to start a debug adapter process + /// - How to start a debug session with it (using DAP protocol) + /// for a given debug scenario. + record debug-adapter-binary { + command: option, + arguments: list, + envs: env-vars, + cwd: option, + /// Zed will use TCP transport if `connection` is specified. + connection: option, + request-args: start-debugging-request-arguments + } +} diff --git a/crates/extension_api/wit/since_v0.8.0/extension.wit b/crates/extension_api/wit/since_v0.8.0/extension.wit new file mode 100644 index 0000000000000000000000000000000000000000..8195162b89a420d322970bf894bd9ec824119087 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/extension.wit @@ -0,0 +1,167 @@ +package zed:extension; + +world extension { + import context-server; + import dap; + import github; + import http-client; + import platform; + import process; + import nodejs; + + use common.{env-vars, range}; + use context-server.{context-server-configuration}; + use dap.{attach-request, build-task-template, debug-config, debug-adapter-binary, debug-task-definition, debug-request, debug-scenario, launch-request, resolved-task, start-debugging-request-arguments-request}; + use lsp.{completion, symbol}; + use process.{command}; + use slash-command.{slash-command, slash-command-argument-completion, slash-command-output}; + + /// Initializes the extension. + export init-extension: func(); + + /// The type of a downloaded file. + enum downloaded-file-type { + /// A gzipped file (`.gz`). + gzip, + /// A gzipped tar archive (`.tar.gz`). + gzip-tar, + /// A ZIP file (`.zip`). + zip, + /// An uncompressed file. 
+ uncompressed, + } + + /// The installation status for a language server. + variant language-server-installation-status { + /// The language server has no installation status. + none, + /// The language server is being downloaded. + downloading, + /// The language server is checking for updates. + checking-for-update, + /// The language server installation failed for specified reason. + failed(string), + } + + record settings-location { + worktree-id: u64, + path: string, + } + + import get-settings: func(path: option, category: string, key: option) -> result; + + /// Downloads a file from the given URL and saves it to the given path within the extension's + /// working directory. + /// + /// The file will be extracted according to the given file type. + import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Makes the file at the given path executable. + import make-file-executable: func(filepath: string) -> result<_, string>; + + /// Updates the installation status for the given language server. + import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + /// A Zed worktree. + resource worktree { + /// Returns the ID of the worktree. + id: func() -> u64; + /// Returns the root path of the worktree. + root-path: func() -> string; + /// Returns the textual contents of the specified file in the worktree. + read-text-file: func(path: string) -> result; + /// Returns the path to the given binary name, if one is present on the `$PATH`. + which: func(binary-name: string) -> option; + /// Returns the current shell environment. + shell-env: func() -> env-vars; + } + + /// A Zed project. + resource project { + /// Returns the IDs of all of the worktrees in this project. + worktree-ids: func() -> list; + } + + /// A key-value store. + resource key-value-store { + /// Inserts an entry under the specified key. + insert: func(key: string, value: string) -> result<_, string>; + } + + /// Returns the command used to start up the language server. + export language-server-command: func(language-server-id: string, worktree: borrow) -> result; + + /// Returns the initialization options to pass to the language server on startup. + /// + /// The initialization options are represented as a JSON string. + export language-server-initialization-options: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the workspace configuration options to pass to the language server. + export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the initialization options to pass to the other language server. + export language-server-additional-initialization-options: func(language-server-id: string, target-language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the workspace configuration options to pass to the other language server. + export language-server-additional-workspace-configuration: func(language-server-id: string, target-language-server-id: string, worktree: borrow) -> result, string>; + + /// A label containing some code. + record code-label { + /// The source code to parse with Tree-sitter. + code: string, + /// The spans to display in the label. + spans: list, + /// The range of the displayed label to include when filtering. + filter-range: range, + } + + /// A span within a code label. 
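On the Rust side, the `language-server-command` export declared earlier in this world is implemented through the `Extension` trait in `zed_extension_api`. A sketch assuming the v0.8.0 bindings keep the shape of the published crate; the binary name `my-language-server` is a placeholder:

```rust
use zed_extension_api::{self as zed, LanguageServerId, Result};

struct MyExtension;

impl zed::Extension for MyExtension {
    fn new() -> Self {
        Self
    }

    // Resolve the server binary from the worktree's $PATH and return the
    // command Zed should spawn, backing the `language-server-command` export.
    fn language_server_command(
        &mut self,
        _language_server_id: &LanguageServerId,
        worktree: &zed::Worktree,
    ) -> Result<zed::Command> {
        let command = worktree
            .which("my-language-server")
            .ok_or_else(|| "my-language-server not found on $PATH".to_string())?;
        Ok(zed::Command {
            command,
            args: vec!["--stdio".into()],
            env: Vec::new(),
        })
    }
}

zed::register_extension!(MyExtension);
```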
+ variant code-label-span { + /// A range into the parsed code. + code-range(range), + /// A span containing a code literal. + literal(code-label-span-literal), + } + + /// A span containing a code literal. + record code-label-span-literal { + /// The literal text. + text: string, + /// The name of the highlight to use for this literal. + highlight-name: option, + } + + export labels-for-completions: func(language-server-id: string, completions: list) -> result>, string>; + export labels-for-symbols: func(language-server-id: string, symbols: list) -> result>, string>; + + + /// Returns the completions that should be shown when completing the provided slash command with the given query. + export complete-slash-command-argument: func(command: slash-command, args: list) -> result, string>; + + /// Returns the output from running the provided slash command. + export run-slash-command: func(command: slash-command, args: list, worktree: option>) -> result; + + /// Returns the command used to start up a context server. + export context-server-command: func(context-server-id: string, project: borrow) -> result; + + /// Returns the configuration for a context server. + export context-server-configuration: func(context-server-id: string, project: borrow) -> result, string>; + + /// Returns a list of packages as suggestions to be included in the `/docs` + /// search results. + /// + /// This can be used to provide completions for known packages (e.g., from the + /// local project or a registry) before a package has been indexed. + export suggest-docs-packages: func(provider-name: string) -> result, string>; + + /// Indexes the docs for the specified package. + export index-docs: func(provider-name: string, package-name: string, database: borrow) -> result<_, string>; + + /// Returns a configured debug adapter binary for a given debug task. + export get-dap-binary: func(adapter-name: string, config: debug-task-definition, user-installed-path: option, worktree: borrow) -> result; + /// Returns the kind of a debug scenario (launch or attach). + export dap-request-kind: func(adapter-name: string, config: string) -> result; + export dap-config-to-scenario: func(config: debug-config) -> result; + export dap-locator-create-scenario: func(locator-name: string, build-config-template: build-task-template, resolved-label: string, debug-adapter-name: string) -> option; + export run-dap-locator: func(locator-name: string, config: resolved-task) -> result; +} diff --git a/crates/extension_api/wit/since_v0.8.0/github.wit b/crates/extension_api/wit/since_v0.8.0/github.wit new file mode 100644 index 0000000000000000000000000000000000000000..21cd5d48056af08441d3bb5aa8547edd97a874d7 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/github.wit @@ -0,0 +1,35 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. + assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. + download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. 
+ /// + /// Takes repo as a string in the form "/", for example: "zed-industries/zed". + latest-github-release: func(repo: string, options: github-release-options) -> result; + + /// Returns the GitHub release with the specified tag name for the given GitHub repository. + /// + /// Returns an error if a release with the given tag name does not exist. + github-release-by-tag-name: func(repo: string, tag: string) -> result; +} diff --git a/crates/extension_api/wit/since_v0.8.0/http-client.wit b/crates/extension_api/wit/since_v0.8.0/http-client.wit new file mode 100644 index 0000000000000000000000000000000000000000..bb0206c17a52d4d20b99f445dca4ac606e0485f7 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/http-client.wit @@ -0,0 +1,67 @@ +interface http-client { + /// An HTTP request. + record http-request { + /// The HTTP method for the request. + method: http-method, + /// The URL to which the request should be made. + url: string, + /// The headers for the request. + headers: list>, + /// The request body. + body: option>, + /// The policy to use for redirects. + redirect-policy: redirect-policy, + } + + /// HTTP methods. + enum http-method { + /// `GET` + get, + /// `HEAD` + head, + /// `POST` + post, + /// `PUT` + put, + /// `DELETE` + delete, + /// `OPTIONS` + options, + /// `PATCH` + patch, + } + + /// The policy for dealing with redirects received from the server. + variant redirect-policy { + /// Redirects from the server will not be followed. + /// + /// This is the default behavior. + no-follow, + /// Redirects from the server will be followed up to the specified limit. + follow-limit(u32), + /// All redirects from the server will be followed. + follow-all, + } + + /// An HTTP response. + record http-response { + /// The response headers. + headers: list>, + /// The response body. + body: list, + } + + /// Performs an HTTP request and returns the response. + fetch: func(req: http-request) -> result; + + /// An HTTP response stream. + resource http-response-stream { + /// Retrieves the next chunk of data from the response stream. + /// + /// Returns `Ok(None)` if the stream has ended. + next-chunk: func() -> result>, string>; + } + + /// Performs an HTTP request and returns a response stream. + fetch-stream: func(req: http-request) -> result; +} diff --git a/crates/extension_api/wit/since_v0.8.0/lsp.wit b/crates/extension_api/wit/since_v0.8.0/lsp.wit new file mode 100644 index 0000000000000000000000000000000000000000..91a36c93a66467ea7dc7d78932d3821dae79d864 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/lsp.wit @@ -0,0 +1,90 @@ +interface lsp { + /// An LSP completion. + record completion { + label: string, + label-details: option, + detail: option, + kind: option, + insert-text-format: option, + } + + /// The kind of an LSP completion. + variant completion-kind { + text, + method, + function, + %constructor, + field, + variable, + class, + %interface, + module, + property, + unit, + value, + %enum, + keyword, + snippet, + color, + file, + reference, + folder, + enum-member, + constant, + struct, + event, + operator, + type-parameter, + other(s32), + } + + /// Label details for an LSP completion. + record completion-label-details { + detail: option, + description: option, + } + + /// Defines how to interpret the insert text in a completion item. + variant insert-text-format { + plain-text, + snippet, + other(s32), + } + + /// An LSP symbol. + record symbol { + kind: symbol-kind, + name: string, + } + + /// The kind of an LSP symbol. 
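A sketch of how an extension might combine the `github` and `platform` interfaces above to pick a release asset for the current machine. The repository name and asset naming scheme are placeholders, and the bindings are assumed to match the published `zed_extension_api`:

```rust
use zed_extension_api::{self as zed, Result};

// Placeholder repository and asset naming; only the API calls mirror the
// `github` and `platform` interfaces declared in this extension world.
fn latest_asset_url() -> Result<String> {
    let release = zed::latest_github_release(
        "example-org/example-server",
        zed::GithubReleaseOptions {
            require_assets: true,
            pre_release: false,
        },
    )?;

    let (os, arch) = zed::current_platform();
    let suffix = match (os, arch) {
        (zed::Os::Mac, zed::Architecture::Aarch64) => "darwin-arm64",
        (zed::Os::Mac, _) => "darwin-x64",
        (zed::Os::Linux, zed::Architecture::Aarch64) => "linux-arm64",
        (zed::Os::Linux, _) => "linux-x64",
        (zed::Os::Windows, _) => "windows-x64",
    };

    let asset_name = format!("example-server-{}-{}.tar.gz", release.version, suffix);
    let asset = release
        .assets
        .iter()
        .find(|asset| asset.name == asset_name)
        .ok_or_else(|| format!("no asset named {asset_name}"))?;
    Ok(asset.download_url.clone())
}
```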
+ variant symbol-kind { + file, + module, + namespace, + %package, + class, + method, + property, + field, + %constructor, + %enum, + %interface, + function, + variable, + constant, + %string, + number, + boolean, + array, + object, + key, + null, + enum-member, + struct, + event, + operator, + type-parameter, + other(s32), + } +} diff --git a/crates/extension_api/wit/since_v0.8.0/nodejs.wit b/crates/extension_api/wit/since_v0.8.0/nodejs.wit new file mode 100644 index 0000000000000000000000000000000000000000..c814548314162c862e81a98b3fba6950dc2a7f41 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/nodejs.wit @@ -0,0 +1,13 @@ +interface nodejs { + /// Returns the path to the Node binary used by Zed. + node-binary-path: func() -> result; + + /// Returns the latest version of the given NPM package. + npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. + npm-install-package: func(package-name: string, version: string) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.8.0/platform.wit b/crates/extension_api/wit/since_v0.8.0/platform.wit new file mode 100644 index 0000000000000000000000000000000000000000..48472a99bc175fdc24231a690db021433d5a2505 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. + current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.8.0/process.wit b/crates/extension_api/wit/since_v0.8.0/process.wit new file mode 100644 index 0000000000000000000000000000000000000000..d9a5728a3d8f5bdaa578d9dd9fc087610688cf27 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/process.wit @@ -0,0 +1,29 @@ +interface process { + use common.{env-vars}; + + /// A command. + record command { + /// The command to execute. + command: string, + /// The arguments to pass to the command. + args: list, + /// The environment variables to set for the command. + env: env-vars, + } + + /// The output of a finished process. + record output { + /// The status (exit code) of the process. + /// + /// On Unix, this will be `None` if the process was terminated by a signal. + status: option, + /// The data that the process wrote to stdout. + stdout: list, + /// The data that the process wrote to stderr. + stderr: list, + } + + /// Executes the given command as a child process, waiting for it to finish + /// and collecting all of its output. + run-command: func(command: command) -> result; +} diff --git a/crates/extension_api/wit/since_v0.8.0/settings.rs b/crates/extension_api/wit/since_v0.8.0/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..19e28c1ba955a998fe7b97f3eacb57c4b1104154 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/settings.rs @@ -0,0 +1,40 @@ +use serde::{Deserialize, Serialize}; +use std::{collections::HashMap, num::NonZeroU32}; + +/// The settings for a particular language. +#[derive(Debug, Serialize, Deserialize)] +pub struct LanguageSettings { + /// How many columns a tab should occupy. 
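The `nodejs` interface above backs the usual install-or-update flow for Node-based language servers. A small sketch using the Rust bindings; the package name is a placeholder:

```rust
use zed_extension_api as zed;

// Placeholder package name: install the npm package on first use and upgrade
// it when the registry reports a newer version than the one installed.
fn ensure_npm_package(package: &str) -> zed::Result<()> {
    let latest = zed::npm_package_latest_version(package)?;
    let installed = zed::npm_package_installed_version(package)?;
    if installed.as_deref() != Some(latest.as_str()) {
        zed::npm_install_package(package, &latest)?;
    }
    Ok(())
}
```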
+ pub tab_size: NonZeroU32, +} + +/// The settings for a particular language server. +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct LspSettings { + /// The settings for the language server binary. + pub binary: Option, + /// The initialization options to pass to the language server. + pub initialization_options: Option, + /// The settings to pass to language server. + pub settings: Option, +} + +/// The settings for a particular context server. +#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct ContextServerSettings { + /// The settings for the context server binary. + pub command: Option, + /// The settings to pass to the context server. + pub settings: Option, +} + +/// The settings for a command. +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommandSettings { + /// The path to the command. + pub path: Option, + /// The arguments to pass to the command. + pub arguments: Option>, + /// The environment variables. + pub env: Option>, +} diff --git a/crates/extension_api/wit/since_v0.8.0/slash-command.wit b/crates/extension_api/wit/since_v0.8.0/slash-command.wit new file mode 100644 index 0000000000000000000000000000000000000000..f52561c2ef412be071820f3a71621c3c4f3f9da3 --- /dev/null +++ b/crates/extension_api/wit/since_v0.8.0/slash-command.wit @@ -0,0 +1,41 @@ +interface slash-command { + use common.{range}; + + /// A slash command for use in the Assistant. + record slash-command { + /// The name of the slash command. + name: string, + /// The description of the slash command. + description: string, + /// The tooltip text to display for the run button. + tooltip-text: string, + /// Whether this slash command requires an argument. + requires-argument: bool, + } + + /// The output of a slash command. + record slash-command-output { + /// The text produced by the slash command. + text: string, + /// The list of sections to show in the slash command placeholder. + sections: list, + } + + /// A section in the slash command output. + record slash-command-output-section { + /// The range this section occupies. + range: range, + /// The label to display in the placeholder for this section. + label: string, + } + + /// A completion for a slash command argument. + record slash-command-argument-completion { + /// The label to display for this completion. + label: string, + /// The new text that should be inserted into the command when this completion is accepted. + new-text: string, + /// Whether the command should be run when accepting this completion. 
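The `LspSettings` and `CommandSettings` records in this hunk are what extensions read to honor a user-configured server binary. A sketch assuming the `LspSettings::for_worktree` helper from `zed_extension_api::settings` keeps its published shape; the server name is a placeholder:

```rust
use zed_extension_api::{self as zed, settings::LspSettings};

// Placeholder server name: prefer a user-provided binary path, arguments, and
// environment from settings, matching the records defined in this hunk.
fn user_configured_command(worktree: &zed::Worktree) -> zed::Result<Option<zed::Command>> {
    let settings = LspSettings::for_worktree("example-language-server", worktree)?;
    let Some(binary) = settings.binary else {
        return Ok(None);
    };
    let Some(path) = binary.path else {
        return Ok(None);
    };
    Ok(Some(zed::Command {
        command: path,
        args: binary.arguments.unwrap_or_default(),
        env: binary
            .env
            .map(|env| env.into_iter().collect())
            .unwrap_or_default(),
    }))
}
```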
+ run-command: bool, + } +} diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 524e14b0cedcebef259948d73b530236525180c0..699a6b014322833a15fd593d7e5ac613a221ed24 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -71,6 +71,7 @@ async fn main() -> Result<()> { &extension_path, &mut manifest, CompileExtensionOptions { release: true }, + fs.clone(), ) .await .context("failed to compile extension")?; diff --git a/crates/extension_host/Cargo.toml b/crates/extension_host/Cargo.toml index 16cbd9ac0c0ef938322f2b57789c7542549a570a..328b808b1310e3402405c52ce27a8ae15c4d5ece 100644 --- a/crates/extension_host/Cargo.toml +++ b/crates/extension_host/Cargo.toml @@ -38,7 +38,7 @@ paths.workspace = true project.workspace = true remote.workspace = true release_channel.workspace = true -semantic_version.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true diff --git a/crates/extension_host/benches/extension_compilation_benchmark.rs b/crates/extension_host/benches/extension_compilation_benchmark.rs index 9cb57fc1fb800df3f20d277cff5c85ecddadf5ad..a28f617dc36e5cba3ad36d7ab6477e7a665dd5c4 100644 --- a/crates/extension_host/benches/extension_compilation_benchmark.rs +++ b/crates/extension_host/benches/extension_compilation_benchmark.rs @@ -7,8 +7,8 @@ use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, }; use extension_host::wasm_host::WasmHost; -use fs::RealFs; -use gpui::{SemanticVersion, TestAppContext, TestDispatcher}; +use fs::{Fs, RealFs}; +use gpui::{TestAppContext, TestDispatcher}; use http_client::{FakeHttpClient, Response}; use node_runtime::NodeRuntime; use rand::{SeedableRng, rngs::StdRng}; @@ -24,7 +24,11 @@ fn extension_benchmarks(c: &mut Criterion) { let mut group = c.benchmark_group("load"); let mut manifest = manifest(); - let wasm_bytes = wasm_bytes(&cx, &mut manifest); + let wasm_bytes = wasm_bytes( + &cx, + &mut manifest, + Arc::new(RealFs::new(None, cx.executor())), + ); let manifest = Arc::new(manifest); let extensions_dir = TempTree::new(json!({ "installed": {}, @@ -54,13 +58,13 @@ fn init() -> TestAppContext { cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); - release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); }); cx } -fn wasm_bytes(cx: &TestAppContext, manifest: &mut ExtensionManifest) -> Vec { +fn wasm_bytes(cx: &TestAppContext, manifest: &mut ExtensionManifest, fs: Arc) -> Vec { let extension_builder = extension_builder(); let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() @@ -73,6 +77,7 @@ fn wasm_bytes(cx: &TestAppContext, manifest: &mut ExtensionManifest) -> Vec &path, manifest, CompileExtensionOptions { release: true }, + fs, )) .unwrap(); std::fs::read(path.join("extension.wasm")).unwrap() @@ -124,7 +129,7 @@ fn manifest() -> ExtensionManifest { icon_themes: Vec::new(), lib: LibManifestEntry { kind: Some(ExtensionLibraryKind::Rust), - version: Some(SemanticVersion::new(0, 1, 0)), + version: Some(semver::Version::new(0, 1, 0)), }, languages: Vec::new(), grammars: BTreeMap::default(), diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 7a2e13972e31091a22b0390a51732d68393c99a0..09e8259771668346c237c1cc05e6074ca3b37797 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -11,7 +11,7 @@ use 
async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use client::ExtensionProvides; use client::{Client, ExtensionMetadata, GetExtensionsResponse, proto, telemetry::Telemetry}; -use collections::{BTreeMap, BTreeSet, HashMap, HashSet, btree_map}; +use collections::{BTreeMap, BTreeSet, HashSet, btree_map}; pub use extension::ExtensionManifest; use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; use extension::{ @@ -43,8 +43,8 @@ use language::{ use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; use release_channel::ReleaseChannel; -use remote::{RemoteClient, RemoteConnectionOptions}; -use semantic_version::SemanticVersion; +use remote::RemoteClient; +use semver::Version; use serde::{Deserialize, Serialize}; use settings::Settings; use std::ops::RangeInclusive; @@ -98,7 +98,7 @@ pub fn is_version_compatible( .manifest .wasm_api_version .as_ref() - .and_then(|wasm_api_version| SemanticVersion::from_str(wasm_api_version).ok()) + .and_then(|wasm_api_version| Version::from_str(wasm_api_version).ok()) && !is_supported_wasm_api_version(release_channel, wasm_api_version) { return false; @@ -123,7 +123,7 @@ pub struct ExtensionStore { pub wasm_host: Arc, pub wasm_extensions: Vec<(Arc, WasmExtension)>, pub tasks: Vec>, - pub remote_clients: HashMap>, + pub remote_clients: Vec>, pub ssh_registered_tx: UnboundedSender<()>, } @@ -274,7 +274,7 @@ impl ExtensionStore { reload_tx, tasks: Vec::new(), - remote_clients: HashMap::default(), + remote_clients: Default::default(), ssh_registered_tx: connection_registered_tx, }; @@ -343,12 +343,12 @@ impl ExtensionStore { let index = this .update(cx, |this, cx| this.rebuild_extension_index(cx))? .await; - this.update( cx, |this, cx| this.extensions_updated(index, cx))? + this.update(cx, |this, cx| this.extensions_updated(index, cx))? .await; index_changed = false; } - Self::update_ssh_clients(&this, cx).await?; + Self::update_remote_clients(&this, cx).await?; } _ = connection_registered_rx.next() => { debounce_timer = cx @@ -639,9 +639,8 @@ impl ExtensionStore { this.extension_index.extensions.get(&extension.id) { let installed_version = - SemanticVersion::from_str(&installed_extension.manifest.version).ok()?; - let latest_version = - SemanticVersion::from_str(&extension.manifest.version).ok()?; + Version::from_str(&installed_extension.manifest.version).ok()?; + let latest_version = Version::from_str(&extension.manifest.version).ok()?; if installed_version >= latest_version { return None; @@ -758,29 +757,28 @@ impl ExtensionStore { if let Some(content_length) = content_length { let actual_len = tar_gz_bytes.len(); if content_length != actual_len { - bail!("downloaded extension size {actual_len} does not match content length {content_length}"); + bail!(concat!( + "downloaded extension size {actual_len} ", + "does not match content length {content_length}" + )); } } let decompressed_bytes = GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice())); let archive = Archive::new(decompressed_bytes); archive.unpack(extension_dir).await?; - this.update( cx, |this, cx| { - this.reload(Some(extension_id.clone()), cx) - })? - .await; + this.update(cx, |this, cx| this.reload(Some(extension_id.clone()), cx))? 
+ .await; if let ExtensionOperation::Install = operation { - this.update( cx, |this, cx| { + this.update(cx, |this, cx| { cx.emit(Event::ExtensionInstalled(extension_id.clone())); if let Some(events) = ExtensionEvents::try_global(cx) - && let Some(manifest) = this.extension_manifest_for_id(&extension_id) { - events.update(cx, |this, cx| { - this.emit( - extension::Event::ExtensionInstalled(manifest.clone()), - cx, - ) - }); - } + && let Some(manifest) = this.extension_manifest_for_id(&extension_id) + { + events.update(cx, |this, cx| { + this.emit(extension::Event::ExtensionInstalled(manifest.clone()), cx) + }); + } }) .ok(); } @@ -982,12 +980,14 @@ impl ExtensionStore { cx.background_spawn({ let extension_source_path = extension_source_path.clone(); + let fs = fs.clone(); async move { builder .compile_extension( &extension_source_path, &mut extension_manifest, CompileExtensionOptions { release: false }, + fs, ) .await } @@ -1044,12 +1044,13 @@ impl ExtensionStore { cx.notify(); let compile = cx.background_spawn(async move { - let mut manifest = ExtensionManifest::load(fs, &path).await?; + let mut manifest = ExtensionManifest::load(fs.clone(), &path).await?; builder .compile_extension( &path, &mut manifest, CompileExtensionOptions { release: true }, + fs, ) .await }); @@ -1129,6 +1130,7 @@ impl ExtensionStore { } if extensions_to_load.is_empty() && extensions_to_unload.is_empty() { + self.reload_complete_senders.clear(); return Task::ready(()); } @@ -1377,7 +1379,11 @@ impl ExtensionStore { wasm_extensions.push((extension.manifest.clone(), wasm_extension)) } Err(e) => { - log::error!("Failed to load extension: {e:#}"); + log::error!( + "Failed to load extension: {}, {:#}", + extension.manifest.id, + e + ); this.update(cx, |_, cx| { cx.emit(Event::ExtensionFailedToLoad(extension.manifest.id.clone())) }) @@ -1726,7 +1732,7 @@ impl ExtensionStore { }) } - async fn sync_extensions_over_ssh( + async fn sync_extensions_to_remotes( this: &WeakEntity, client: WeakEntity, cx: &mut AsyncApp, @@ -1779,7 +1785,11 @@ impl ExtensionStore { })?, path_style, ); - log::info!("Uploading extension {}", missing_extension.clone().id); + log::info!( + "Uploading extension {} to {:?}", + missing_extension.clone().id, + dest_dir + ); client .update(cx, |client, cx| { @@ -1792,27 +1802,35 @@ impl ExtensionStore { missing_extension.clone().id ); - client + let result = client .update(cx, |client, _cx| { client.proto_client().request(proto::InstallExtension { tmp_dir: dest_dir.to_proto(), - extension: Some(missing_extension), + extension: Some(missing_extension.clone()), }) })? 
- .await?; + .await; + + if let Err(e) = result { + log::error!( + "Failed to install extension {}: {}", + missing_extension.id, + e + ); + } } anyhow::Ok(()) } - pub async fn update_ssh_clients(this: &WeakEntity, cx: &mut AsyncApp) -> Result<()> { + pub async fn update_remote_clients(this: &WeakEntity, cx: &mut AsyncApp) -> Result<()> { let clients = this.update(cx, |this, _cx| { - this.remote_clients.retain(|_k, v| v.upgrade().is_some()); - this.remote_clients.values().cloned().collect::>() + this.remote_clients.retain(|v| v.upgrade().is_some()); + this.remote_clients.clone() })?; for client in clients { - Self::sync_extensions_over_ssh(this, client, cx) + Self::sync_extensions_to_remotes(this, client, cx) .await .log_err(); } @@ -1820,16 +1838,12 @@ impl ExtensionStore { anyhow::Ok(()) } - pub fn register_remote_client(&mut self, client: Entity, cx: &mut Context) { - let options = client.read(cx).connection_options(); - - if let Some(existing_client) = self.remote_clients.get(&options) - && existing_client.upgrade().is_some() - { - return; - } - - self.remote_clients.insert(options, client.downgrade()); + pub fn register_remote_client( + &mut self, + client: Entity, + _cx: &mut Context, + ) { + self.remote_clients.push(client.downgrade()); self.ssh_registered_tx.unbounded_send(()).ok(); } } diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 7ba368c667ef5bdd37a09b53b697a062a2a0fc8b..54b090347ffad3ffed444827f5cb60c120d25ad7 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -8,7 +8,7 @@ use collections::{BTreeMap, HashSet}; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs, RealFs}; use futures::{AsyncReadExt, StreamExt, io::BufReader}; -use gpui::{AppContext as _, SemanticVersion, TestAppContext}; +use gpui::{AppContext as _, TestAppContext}; use http_client::{FakeHttpClient, Response}; use language::{BinaryStatus, LanguageMatcher, LanguageName, LanguageRegistry}; use language_extension::LspAccess; @@ -307,9 +307,9 @@ async fn test_extension_store(cx: &mut TestAppContext) { assert_eq!( language_registry.language_names(), [ - LanguageName::new("ERB"), - LanguageName::new("Plain Text"), - LanguageName::new("Ruby"), + LanguageName::new_static("ERB"), + LanguageName::new_static("Plain Text"), + LanguageName::new_static("Ruby"), ] ); assert_eq!( @@ -463,9 +463,9 @@ async fn test_extension_store(cx: &mut TestAppContext) { assert_eq!( language_registry.language_names(), [ - LanguageName::new("ERB"), - LanguageName::new("Plain Text"), - LanguageName::new("Ruby"), + LanguageName::new_static("ERB"), + LanguageName::new_static("Plain Text"), + LanguageName::new_static("Ruby"), ] ); assert_eq!( @@ -523,7 +523,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { assert_eq!( language_registry.language_names(), - [LanguageName::new("Plain Text")] + [LanguageName::new_static("Plain Text")] ); assert_eq!(language_registry.grammar_names(), []); }); @@ -705,7 +705,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { .await .unwrap(); - let mut fake_servers = language_registry.register_fake_language_server( + let mut fake_servers = language_registry.register_fake_lsp_server( LanguageServerName("gleam".into()), lsp::ServerCapabilities { completion_provider: Some(Default::default()), @@ -866,7 +866,7 @@ fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); - 
release_channel::init(SemanticVersion::default(), cx); + release_channel::init(semver::Version::new(0, 0, 0), cx); extension::init(cx); theme::init(theme::LoadThemes::JustBase, cx); gpui_tokio::init(cx); diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index f14bb811a6742a60899ac4301cfac096bb41a07f..c3a290a55a8f901553d6d2b2542d8af8bcd1665c 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -96,7 +96,7 @@ impl HeadlessExtensionStore { for extension in to_load { if let Err(e) = Self::load_extension(this.clone(), extension.clone(), cx).await { - log::info!("failed to load extension: {}, {:?}", extension.id, e); + log::info!("failed to load extension: {}, {:#}", extension.id, e); missing.push(extension) } else if extension.dev { missing.push(extension) @@ -279,7 +279,8 @@ impl HeadlessExtensionStore { } fs.rename(&tmp_path, &path, RenameOptions::default()) - .await?; + .await + .with_context(|| format!("Failed to rename {tmp_path:?} to {path:?}"))?; Self::load_extension(this, extension, cx).await }) diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index eb26c44f20519b7cdb3a38859f23ce99365fe505..a6e5768f16243ce6c6a4d250002e29d5db06a071 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -28,7 +28,7 @@ use lsp::LanguageServerName; use moka::sync::Cache; use node_runtime::NodeRuntime; use release_channel::ReleaseChannel; -use semantic_version::SemanticVersion; +use semver::Version; use settings::Settings; use std::{ borrow::Cow, @@ -45,7 +45,7 @@ use wasmtime::{ CacheStore, Engine, Store, component::{Component, ResourceTable}, }; -use wasmtime_wasi::{self as wasi, WasiView}; +use wasmtime_wasi::p2::{self as wasi, IoView as _}; use wit::Extension; pub struct WasmHost { @@ -68,7 +68,7 @@ pub struct WasmExtension { pub manifest: Arc, pub work_dir: Arc, #[allow(unused)] - pub zed_api_version: SemanticVersion, + pub zed_api_version: Version, _task: Arc>>, } @@ -537,7 +537,6 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine { let engine_ref = engine.weak(); executor .spawn(async move { - IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release)); // Somewhat arbitrary interval, as it isn't a guaranteed interval. // But this is a rough upper bound for how long the extension execution can block on // `Future::poll`. @@ -631,7 +630,7 @@ impl WasmHost { &executor, &mut store, this.release_channel, - zed_api_version, + zed_api_version.clone(), &component, ) .await?; @@ -643,6 +642,12 @@ impl WasmHost { let (tx, mut rx) = mpsc::unbounded::(); let extension_task = async move { + // note: Setting the thread local here will slowly "poison" all tokio threads + // causing us to not record their panics any longer. + // + // This is fine though, the main zed binary only uses tokio for livekit and wasm extensions. + // Livekit seldom (if ever) panics 🤞 so the likelihood of us missing a panic in sentry is very low.
+ IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release)); while let Some(call) = rx.next().await { (call)(&mut extension, &mut store).await; } @@ -659,8 +664,8 @@ impl WasmHost { cx.spawn(async move |cx| { let (extension_task, manifest, work_dir, tx, zed_api_version) = cx.background_executor().spawn(load_extension_task).await?; - // we need to run run the task in an extension context as wasmtime_wasi may - // call into tokio, accessing its runtime handle + // we need to run the task in a tokio context as wasmtime_wasi may + // call into tokio, accessing its runtime handle when we trigger the `engine.increment_epoch()` above. let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)?); Ok(WasmExtension { @@ -680,8 +685,8 @@ impl WasmHost { .await .context("failed to create extension work dir")?; - let file_perms = wasi::FilePerms::all(); - let dir_perms = wasi::DirPerms::all(); + let file_perms = wasmtime_wasi::FilePerms::all(); + let dir_perms = wasmtime_wasi::DirPerms::all(); let path = SanitizedPath::new(&extension_work_dir).to_string(); #[cfg(target_os = "windows")] let path = path.replace('\\', "/"); @@ -708,10 +713,7 @@ impl WasmHost { } } -pub fn parse_wasm_extension_version( - extension_id: &str, - wasm_bytes: &[u8], -) -> Result { +pub fn parse_wasm_extension_version(extension_id: &str, wasm_bytes: &[u8]) -> Result { let mut version = None; for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) { @@ -738,9 +740,9 @@ pub fn parse_wasm_extension_version( version.with_context(|| format!("extension {extension_id} has no zed:api-version section")) } -fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option { +fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option { if data.len() == 6 { - Some(SemanticVersion::new( + Some(Version::new( u16::from_be_bytes([data[0], data[1]]) as _, u16::from_be_bytes([data[2], data[3]]) as _, u16::from_be_bytes([data[4], data[5]]) as _, @@ -763,17 +765,17 @@ impl WasmExtension { .fs .open_sync(&path) .await - .context("failed to open wasm file")?; + .context(format!("opening wasm file, path: {path:?}"))?; let mut wasm_bytes = Vec::new(); wasm_file .read_to_end(&mut wasm_bytes) - .context("failed to read wasm")?; + .context(format!("reading wasm file, path: {path:?}"))?; wasm_host .load_extension(wasm_bytes, manifest, cx) .await - .with_context(|| format!("failed to load wasm extension {}", manifest.id)) + .with_context(|| format!("loading wasm extension: {}", manifest.id)) } pub async fn call(&self, f: Fn) -> Result @@ -854,11 +856,13 @@ impl WasmState { } } -impl wasi::WasiView for WasmState { +impl wasi::IoView for WasmState { fn table(&mut self) -> &mut ResourceTable { &mut self.table } +} +impl wasi::WasiView for WasmState { fn ctx(&mut self) -> &mut wasi::WasiCtx { &mut self.ctx } diff --git a/crates/extension_host/src/wasm_host/wit.rs b/crates/extension_host/src/wasm_host/wit.rs index 1f1fa49bd535ad19f4981eeed9fcdca1ba9421a9..e080915b4fe1f18325843961db36e2fbc16bd418 100644 --- a/crates/extension_host/src/wasm_host/wit.rs +++ b/crates/extension_host/src/wasm_host/wit.rs @@ -7,6 +7,7 @@ mod since_v0_3_0; mod since_v0_4_0; mod since_v0_5_0; mod since_v0_6_0; +mod since_v0_8_0; use dap::DebugRequest; use extension::{DebugTaskDefinition, KeyValueStoreDelegate, WorktreeDelegate}; use gpui::BackgroundExecutor; @@ -19,8 +20,8 @@ use crate::wasm_host::wit::since_v0_6_0::dap::StartDebuggingRequestArgumentsRequ use super::{WasmState, wasm_engine}; use anyhow::{Context as _, Result, anyhow}; -use 
semantic_version::SemanticVersion; -use since_v0_6_0 as latest; +use semver::Version; +use since_v0_8_0 as latest; use std::{ops::RangeInclusive, path::PathBuf, sync::Arc}; use wasmtime::{ Store, @@ -44,7 +45,7 @@ pub fn new_linker( f: impl Fn(&mut Linker, fn(&mut WasmState) -> &mut WasmState) -> Result<()>, ) -> Linker { let mut linker = Linker::new(&wasm_engine(executor)); - wasmtime_wasi::add_to_linker_async(&mut linker).unwrap(); + wasmtime_wasi::p2::add_to_linker_async(&mut linker).unwrap(); f(&mut linker, wasi_view).unwrap(); linker } @@ -54,22 +55,19 @@ fn wasi_view(state: &mut WasmState) -> &mut WasmState { } /// Returns whether the given Wasm API version is supported by the Wasm host. -pub fn is_supported_wasm_api_version( - release_channel: ReleaseChannel, - version: SemanticVersion, -) -> bool { +pub fn is_supported_wasm_api_version(release_channel: ReleaseChannel, version: Version) -> bool { wasm_api_version_range(release_channel).contains(&version) } /// Returns the Wasm API version range that is supported by the Wasm host. #[inline(always)] -pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive { +pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive { // Note: The release channel can be used to stage a new version of the extension API. let _ = release_channel; let max_version = match release_channel { ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION, - ReleaseChannel::Stable | ReleaseChannel::Preview => latest::MAX_VERSION, + ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_6_0::MAX_VERSION, }; since_v0_0_1::MIN_VERSION..=max_version @@ -98,6 +96,7 @@ pub fn authorize_access_to_unreleased_wasm_api_version( } pub enum Extension { + V0_8_0(since_v0_8_0::Extension), V0_6_0(since_v0_6_0::Extension), V0_5_0(since_v0_5_0::Extension), V0_4_0(since_v0_4_0::Extension), @@ -114,17 +113,28 @@ impl Extension { executor: &BackgroundExecutor, store: &mut Store, release_channel: ReleaseChannel, - version: SemanticVersion, + version: Version, component: &Component, ) -> Result { // Note: The release channel can be used to stage a new version of the extension API. 
let _ = release_channel; if version >= latest::MIN_VERSION { + authorize_access_to_unreleased_wasm_api_version(release_channel)?; + let extension = latest::Extension::instantiate_async(store, component, latest::linker(executor)) .await .context("failed to instantiate wasm extension")?; + Ok(Self::V0_8_0(extension)) + } else if version >= since_v0_6_0::MIN_VERSION { + let extension = since_v0_6_0::Extension::instantiate_async( + store, + component, + since_v0_6_0::linker(executor), + ) + .await + .context("failed to instantiate wasm extension")?; Ok(Self::V0_6_0(extension)) } else if version >= since_v0_5_0::MIN_VERSION { let extension = since_v0_5_0::Extension::instantiate_async( @@ -203,6 +213,7 @@ impl Extension { pub async fn call_init_extension(&self, store: &mut Store) -> Result<()> { match self { + Extension::V0_8_0(ext) => ext.call_init_extension(store).await, Extension::V0_6_0(ext) => ext.call_init_extension(store).await, Extension::V0_5_0(ext) => ext.call_init_extension(store).await, Extension::V0_4_0(ext) => ext.call_init_extension(store).await, @@ -223,6 +234,10 @@ impl Extension { resource: Resource>, ) -> Result> { match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_command(store, &language_server_id.0, resource) + .await + } Extension::V0_6_0(ext) => { ext.call_language_server_command(store, &language_server_id.0, resource) .await @@ -285,6 +300,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_initialization_options( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V0_6_0(ext) => { ext.call_language_server_initialization_options( store, @@ -374,6 +397,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_workspace_configuration( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V0_6_0(ext) => { ext.call_language_server_workspace_configuration( store, @@ -442,6 +473,15 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_additional_initialization_options( + store, + &language_server_id.0, + &target_language_server_id.0, + resource, + ) + .await + } Extension::V0_6_0(ext) => { ext.call_language_server_additional_initialization_options( store, @@ -486,6 +526,15 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_additional_workspace_configuration( + store, + &language_server_id.0, + &target_language_server_id.0, + resource, + ) + .await + } Extension::V0_6_0(ext) => { ext.call_language_server_additional_workspace_configuration( store, @@ -529,10 +578,23 @@ impl Extension { completions: Vec, ) -> Result>, String>> { match self { - Extension::V0_6_0(ext) => { + Extension::V0_8_0(ext) => { ext.call_labels_for_completions(store, &language_server_id.0, &completions) .await } + Extension::V0_6_0(ext) => Ok(ext + .call_labels_for_completions( + store, + &language_server_id.0, + &completions.into_iter().collect::>(), + ) + .await? 
+ .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V0_5_0(ext) => Ok(ext .call_labels_for_completions( store, @@ -622,10 +684,23 @@ impl Extension { symbols: Vec, ) -> Result>, String>> { match self { - Extension::V0_6_0(ext) => { + Extension::V0_8_0(ext) => { ext.call_labels_for_symbols(store, &language_server_id.0, &symbols) .await } + Extension::V0_6_0(ext) => Ok(ext + .call_labels_for_symbols( + store, + &language_server_id.0, + &symbols.into_iter().collect::>(), + ) + .await? + .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V0_5_0(ext) => Ok(ext .call_labels_for_symbols( store, @@ -715,6 +790,10 @@ impl Extension { arguments: &[String], ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_complete_slash_command_argument(store, command, arguments) + .await + } Extension::V0_6_0(ext) => { ext.call_complete_slash_command_argument(store, command, arguments) .await @@ -753,6 +832,10 @@ impl Extension { resource: Option>>, ) -> Result> { match self { + Extension::V0_8_0(ext) => { + ext.call_run_slash_command(store, command, arguments, resource) + .await + } Extension::V0_6_0(ext) => { ext.call_run_slash_command(store, command, arguments, resource) .await @@ -790,6 +873,10 @@ impl Extension { project: Resource, ) -> Result> { match self { + Extension::V0_8_0(ext) => { + ext.call_context_server_command(store, &context_server_id, project) + .await + } Extension::V0_6_0(ext) => { ext.call_context_server_command(store, &context_server_id, project) .await @@ -826,6 +913,10 @@ impl Extension { project: Resource, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => { + ext.call_context_server_configuration(store, &context_server_id, project) + .await + } Extension::V0_6_0(ext) => { ext.call_context_server_configuration(store, &context_server_id, project) .await @@ -852,6 +943,7 @@ impl Extension { provider: &str, ) -> Result, String>> { match self { + Extension::V0_8_0(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V0_6_0(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V0_5_0(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V0_4_0(ext) => ext.call_suggest_docs_packages(store, provider).await, @@ -872,6 +964,10 @@ impl Extension { kv_store: Resource>, ) -> Result> { match self { + Extension::V0_8_0(ext) => { + ext.call_index_docs(store, provider, package_name, kv_store) + .await + } Extension::V0_6_0(ext) => { ext.call_index_docs(store, provider, package_name, kv_store) .await @@ -901,6 +997,7 @@ impl Extension { } } } + pub async fn call_get_dap_binary( &self, store: &mut Store, @@ -927,6 +1024,7 @@ impl Extension { _ => anyhow::bail!("`get_dap_binary` not available prior to v0.6.0"), } } + pub async fn call_dap_request_kind( &self, store: &mut Store, @@ -947,6 +1045,7 @@ impl Extension { _ => anyhow::bail!("`dap_request_kind` not available prior to v0.6.0"), } } + pub async fn call_dap_config_to_scenario( &self, store: &mut Store, @@ -965,6 +1064,7 @@ impl Extension { _ => anyhow::bail!("`dap_config_to_scenario` not available prior to v0.6.0"), } } + pub async fn call_dap_locator_create_scenario( &self, store: &mut Store, @@ -991,6 +1091,7 @@ impl Extension { _ => anyhow::bail!("`dap_locator_create_scenario` not available prior to v0.6.0"), } } + pub async fn call_run_dap_locator( &self, store: &mut Store, diff --git 
a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs index 168dea4a22b4d836277860eebd74c19ed9d31847..17d5c00a9ad08507bbad39190fdfe5134fe77aa1 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_1.rs @@ -5,11 +5,11 @@ use anyhow::Result; use extension::{ExtensionLanguageServerProxy, WorktreeDelegate}; use gpui::BackgroundExecutor; use language::BinaryStatus; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 1); +pub const MIN_VERSION: Version = Version::new(0, 0, 1); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs index 31f752080b6eabe8cf63e38c002fa145b061fb13..11b2e9f66187ea04983b83ace5814620e7ae7f53 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs @@ -3,11 +3,11 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::WorktreeDelegate; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 4); +pub const MIN_VERSION: Version = Version::new(0, 0, 4); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs index 2fc29abadb2eb60d051b37e072727931aee72d69..835a2b30fbadd3d54649d075b588fd79532c5186 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_6.rs @@ -3,11 +3,11 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::WorktreeDelegate; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 6); +pub const MIN_VERSION: Version = Version::new(0, 0, 6); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index 6e6eca975d92f9c8cf5eb206f04da5fccc3f097c..a7a20f6dc7f1dbedddf34a13032887adf5b61a6e 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -11,7 +11,7 @@ use gpui::BackgroundExecutor; use language::LanguageName; use language::{BinaryStatus, language_settings::AllLanguageSettings}; use project::project_settings::ProjectSettings; -use semantic_version::SemanticVersion; +use semver::Version; use std::{ path::{Path, PathBuf}, sync::{Arc, OnceLock}, @@ -23,7 +23,7 @@ use wasmtime::component::{Linker, Resource}; use super::latest; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0); +pub const MIN_VERSION: Version = Version::new(0, 1, 0); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs index 9475438b660d2e126ae6ca24d276795d51d4ce8b..05e3f5a4e7e2997bb40699c2ac8b7e02c71b1a77 100644 --- 
a/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_2_0.rs @@ -2,13 +2,13 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::{KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate}; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; use super::latest; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0); +pub const MIN_VERSION: Version = Version::new(0, 2, 0); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs index b6a75ba7dda6ded2e074a2ece35b4b3f881f1619..08393934fe365640ed4c82172a33a71381edbc54 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_3_0.rs @@ -2,13 +2,13 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::{KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate}; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; use super::latest; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 3, 0); +pub const MIN_VERSION: Version = Version::new(0, 3, 0); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs index 7c8be1322f94e35ded911d64e13f5afb4bf3702c..1b2a95023b611d9366b47faeb9b3a43c81cc24e7 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_4_0.rs @@ -2,13 +2,13 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::{KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate}; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; use super::latest; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 4, 0); +pub const MIN_VERSION: Version = Version::new(0, 4, 0); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs index 6d04663de7772e9c965cf1b88840727cfdcb4b59..23701c9d03f3dccd908a06d90d4d1fe11e74af5c 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_5_0.rs @@ -2,13 +2,13 @@ use crate::wasm_host::WasmState; use anyhow::Result; use extension::{KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate}; use gpui::BackgroundExecutor; -use semantic_version::SemanticVersion; +use semver::Version; use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; use super::latest; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 5, 0); +pub const MIN_VERSION: Version = Version::new(0, 5, 0); wasmtime::component::bindgen!({ async: true, diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index 8b44efdfb196d93df0a609983c2b97147bbe38a8..8595c278b95a433f782ea5c53e2c97c75aa353da 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -1,53 
+1,34 @@ -use crate::wasm_host::wit::since_v0_6_0::{ - dap::{ - AttachRequest, BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, LaunchRequest, - StartDebuggingRequestArguments, TcpArguments, TcpArgumentsTemplate, - }, - slash_command::SlashCommandOutputSection, -}; -use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFormat, SymbolKind}; -use crate::wasm_host::{WasmState, wit::ToWasmtimeResult}; -use ::http_client::{AsyncBody, HttpRequestExt}; -use ::settings::{Settings, WorktreeId}; -use anyhow::{Context as _, Result, bail}; -use async_compression::futures::bufread::GzipDecoder; -use async_tar::Archive; -use async_trait::async_trait; -use extension::{ - ExtensionLanguageServerProxy, KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate, -}; -use futures::{AsyncReadExt, lock::Mutex}; -use futures::{FutureExt as _, io::BufReader}; -use gpui::{BackgroundExecutor, SharedString}; -use language::{BinaryStatus, LanguageName, language_settings::AllLanguageSettings}; -use project::project_settings::ProjectSettings; -use semantic_version::SemanticVersion; -use std::{ - env, - net::Ipv4Addr, - path::{Path, PathBuf}, - str::FromStr, - sync::{Arc, OnceLock}, -}; -use task::{SpawnInTerminal, ZedDebugConfig}; -use url::Url; -use util::{ - archive::extract_zip, fs::make_file_executable, maybe, paths::PathStyle, rel_path::RelPath, -}; +use crate::wasm_host::WasmState; +use anyhow::Result; +use extension::{KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate}; +use gpui::BackgroundExecutor; +use semver::Version; +use std::sync::{Arc, OnceLock}; use wasmtime::component::{Linker, Resource}; -pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 6, 0); -pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 7, 0); +use super::latest; + +pub const MIN_VERSION: Version = Version::new(0, 6, 0); +pub const MAX_VERSION: Version = Version::new(0, 7, 0); wasmtime::component::bindgen!({ async: true, trappable_imports: true, path: "../extension_api/wit/since_v0.6.0", with: { - "worktree": ExtensionWorktree, - "project": ExtensionProject, - "key-value-store": ExtensionKeyValueStore, - "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + "worktree": ExtensionWorktree, + "project": ExtensionProject, + "key-value-store": ExtensionKeyValueStore, + "zed:extension/common": latest::zed::extension::common, + "zed:extension/github": latest::zed::extension::github, + "zed:extension/http-client": latest::zed::extension::http_client, + "zed:extension/lsp": latest::zed::extension::lsp, + "zed:extension/nodejs": latest::zed::extension::nodejs, + "zed:extension/platform": latest::zed::extension::platform, + "zed:extension/process": latest::zed::extension::process, + "zed:extension/slash-command": latest::zed::extension::slash_command, + "zed:extension/context-server": latest::zed::extension::context_server, + "zed:extension/dap": latest::zed::extension::dap, }, }); @@ -61,289 +42,32 @@ mod settings { pub type ExtensionWorktree = Arc; pub type ExtensionProject = Arc; pub type ExtensionKeyValueStore = Arc; -pub type ExtensionHttpResponseStream = Arc>>; pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { static LINKER: OnceLock> = OnceLock::new(); LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) } -impl From for std::ops::Range { - fn from(range: Range) -> Self { - let start = range.start as usize; - let end = range.end as usize; - start..end - } -} - -impl From for extension::Command { - fn from(value: 
Command) -> Self { - Self { - command: value.command.into(), - args: value.args, - env: value.env, - } - } -} - -impl From - for extension::StartDebuggingRequestArgumentsRequest -{ - fn from(value: StartDebuggingRequestArgumentsRequest) -> Self { - match value { - StartDebuggingRequestArgumentsRequest::Launch => Self::Launch, - StartDebuggingRequestArgumentsRequest::Attach => Self::Attach, - } - } -} -impl TryFrom for extension::StartDebuggingRequestArguments { - type Error = anyhow::Error; - - fn try_from(value: StartDebuggingRequestArguments) -> Result { - Ok(Self { - configuration: serde_json::from_str(&value.configuration)?, - request: value.request.into(), - }) - } -} -impl From for extension::TcpArguments { - fn from(value: TcpArguments) -> Self { - Self { - host: value.host.into(), - port: value.port, - timeout: value.timeout, - } - } -} - -impl From for TcpArgumentsTemplate { - fn from(value: extension::TcpArgumentsTemplate) -> Self { - Self { - host: value.host.map(Ipv4Addr::to_bits), - port: value.port, - timeout: value.timeout, - } - } -} - -impl From for extension::TcpArgumentsTemplate { - fn from(value: TcpArgumentsTemplate) -> Self { - Self { - host: value.host.map(Ipv4Addr::from_bits), - port: value.port, - timeout: value.timeout, - } - } -} - -impl TryFrom for DebugTaskDefinition { - type Error = anyhow::Error; - fn try_from(value: extension::DebugTaskDefinition) -> Result { - Ok(Self { - label: value.label.to_string(), - adapter: value.adapter.to_string(), - config: value.config.to_string(), - tcp_connection: value.tcp_connection.map(Into::into), - }) - } -} - -impl From for DebugRequest { - fn from(value: task::DebugRequest) -> Self { - match value { - task::DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()), - task::DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()), - } - } -} - -impl From for task::DebugRequest { - fn from(value: DebugRequest) -> Self { - match value { - DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()), - DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()), - } - } -} - -impl From for LaunchRequest { - fn from(value: task::LaunchRequest) -> Self { - Self { - program: value.program, - cwd: value.cwd.map(|p| p.to_string_lossy().into_owned()), - args: value.args, - envs: value.env.into_iter().collect(), - } - } -} - -impl From for AttachRequest { - fn from(value: task::AttachRequest) -> Self { - Self { - process_id: value.process_id, - } - } -} - -impl From for task::LaunchRequest { - fn from(value: LaunchRequest) -> Self { - Self { - program: value.program, - cwd: value.cwd.map(|p| p.into()), - args: value.args, - env: value.envs.into_iter().collect(), - } - } -} -impl From for task::AttachRequest { - fn from(value: AttachRequest) -> Self { - Self { - process_id: value.process_id, - } - } -} - -impl From for DebugConfig { - fn from(value: ZedDebugConfig) -> Self { - Self { - label: value.label.into(), - adapter: value.adapter.into(), - request: value.request.into(), - stop_on_entry: value.stop_on_entry, - } - } -} -impl TryFrom for extension::DebugAdapterBinary { - type Error = anyhow::Error; - fn try_from(value: DebugAdapterBinary) -> Result { - Ok(Self { - command: value.command, - arguments: value.arguments, - envs: value.envs.into_iter().collect(), - cwd: value.cwd.map(|s| s.into()), - connection: value.connection.map(Into::into), - request_args: value.request_args.try_into()?, - }) - } -} - -impl From for extension::BuildTaskDefinition { - fn 
from(value: BuildTaskDefinition) -> Self { - match value { - BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), - BuildTaskDefinition::Template(build_task_template) => Self::Template { - task_template: build_task_template.template.into(), - locator_name: build_task_template.locator_name.map(SharedString::from), - }, - } - } -} - -impl From for BuildTaskDefinition { - fn from(value: extension::BuildTaskDefinition) -> Self { - match value { - extension::BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), - extension::BuildTaskDefinition::Template { - task_template, - locator_name, - } => Self::Template(BuildTaskDefinitionTemplatePayload { - template: task_template.into(), - locator_name: locator_name.map(String::from), - }), - } - } -} -impl From for extension::BuildTaskTemplate { - fn from(value: BuildTaskTemplate) -> Self { - Self { - label: value.label, - command: value.command, - args: value.args, - env: value.env.into_iter().collect(), - cwd: value.cwd, - ..Default::default() - } - } -} -impl From for BuildTaskTemplate { - fn from(value: extension::BuildTaskTemplate) -> Self { - Self { - label: value.label, - command: value.command, - args: value.args, - env: value.env.into_iter().collect(), - cwd: value.cwd, - } - } -} - -impl TryFrom for extension::DebugScenario { - type Error = anyhow::Error; - - fn try_from(value: DebugScenario) -> std::result::Result { - Ok(Self { - adapter: value.adapter.into(), - label: value.label.into(), - build: value.build.map(Into::into), - config: serde_json::Value::from_str(&value.config)?, - tcp_connection: value.tcp_connection.map(Into::into), - }) - } -} - -impl From for DebugScenario { - fn from(value: extension::DebugScenario) -> Self { - Self { - adapter: value.adapter.into(), - label: value.label.into(), - build: value.build.map(Into::into), - config: value.config.to_string(), - tcp_connection: value.tcp_connection.map(Into::into), - } - } -} - -impl TryFrom for ResolvedTask { - type Error = anyhow::Error; - - fn try_from(value: SpawnInTerminal) -> Result { - Ok(Self { - label: value.label, - command: value.command.context("missing command")?, - args: value.args, - env: value.env.into_iter().collect(), - cwd: value.cwd.map(|s| { - let s = s.to_string_lossy(); - if cfg!(target_os = "windows") { - s.replace('\\', "/") - } else { - s.into_owned() - } - }), - }) - } -} - -impl From for extension::CodeLabel { +impl From for latest::CodeLabel { fn from(value: CodeLabel) -> Self { Self { code: value.code, spans: value.spans.into_iter().map(Into::into).collect(), - filter_range: value.filter_range.into(), + filter_range: value.filter_range, } } } -impl From for extension::CodeLabelSpan { +impl From for latest::CodeLabelSpan { fn from(value: CodeLabelSpan) -> Self { match value { - CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()), + CodeLabelSpan::CodeRange(range) => Self::CodeRange(range), CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()), } } } -impl From for extension::CodeLabelSpanLiteral { +impl From for latest::CodeLabelSpanLiteral { fn from(value: CodeLabelSpanLiteral) -> Self { Self { text: value.text, @@ -352,167 +76,37 @@ impl From for extension::CodeLabelSpanLiteral { } } -impl From for Completion { - fn from(value: extension::Completion) -> Self { +impl From for latest::SettingsLocation { + fn from(value: SettingsLocation) -> Self { Self { - label: value.label, - label_details: value.label_details.map(Into::into), - detail: value.detail, - kind: value.kind.map(Into::into), - 
insert_text_format: value.insert_text_format.map(Into::into), + worktree_id: value.worktree_id, + path: value.path, } } } -impl From for CompletionLabelDetails { - fn from(value: extension::CompletionLabelDetails) -> Self { - Self { - detail: value.detail, - description: value.description, - } - } -} - -impl From for CompletionKind { - fn from(value: extension::CompletionKind) -> Self { +impl From for latest::LanguageServerInstallationStatus { + fn from(value: LanguageServerInstallationStatus) -> Self { match value { - extension::CompletionKind::Text => Self::Text, - extension::CompletionKind::Method => Self::Method, - extension::CompletionKind::Function => Self::Function, - extension::CompletionKind::Constructor => Self::Constructor, - extension::CompletionKind::Field => Self::Field, - extension::CompletionKind::Variable => Self::Variable, - extension::CompletionKind::Class => Self::Class, - extension::CompletionKind::Interface => Self::Interface, - extension::CompletionKind::Module => Self::Module, - extension::CompletionKind::Property => Self::Property, - extension::CompletionKind::Unit => Self::Unit, - extension::CompletionKind::Value => Self::Value, - extension::CompletionKind::Enum => Self::Enum, - extension::CompletionKind::Keyword => Self::Keyword, - extension::CompletionKind::Snippet => Self::Snippet, - extension::CompletionKind::Color => Self::Color, - extension::CompletionKind::File => Self::File, - extension::CompletionKind::Reference => Self::Reference, - extension::CompletionKind::Folder => Self::Folder, - extension::CompletionKind::EnumMember => Self::EnumMember, - extension::CompletionKind::Constant => Self::Constant, - extension::CompletionKind::Struct => Self::Struct, - extension::CompletionKind::Event => Self::Event, - extension::CompletionKind::Operator => Self::Operator, - extension::CompletionKind::TypeParameter => Self::TypeParameter, - extension::CompletionKind::Other(value) => Self::Other(value), + LanguageServerInstallationStatus::None => Self::None, + LanguageServerInstallationStatus::Downloading => Self::Downloading, + LanguageServerInstallationStatus::CheckingForUpdate => Self::CheckingForUpdate, + LanguageServerInstallationStatus::Failed(message) => Self::Failed(message), } } } -impl From for InsertTextFormat { - fn from(value: extension::InsertTextFormat) -> Self { +impl From for latest::DownloadedFileType { + fn from(value: DownloadedFileType) -> Self { match value { - extension::InsertTextFormat::PlainText => Self::PlainText, - extension::InsertTextFormat::Snippet => Self::Snippet, - extension::InsertTextFormat::Other(value) => Self::Other(value), + DownloadedFileType::Gzip => Self::Gzip, + DownloadedFileType::GzipTar => Self::GzipTar, + DownloadedFileType::Zip => Self::Zip, + DownloadedFileType::Uncompressed => Self::Uncompressed, } } } -impl From for Symbol { - fn from(value: extension::Symbol) -> Self { - Self { - kind: value.kind.into(), - name: value.name, - } - } -} - -impl From for SymbolKind { - fn from(value: extension::SymbolKind) -> Self { - match value { - extension::SymbolKind::File => Self::File, - extension::SymbolKind::Module => Self::Module, - extension::SymbolKind::Namespace => Self::Namespace, - extension::SymbolKind::Package => Self::Package, - extension::SymbolKind::Class => Self::Class, - extension::SymbolKind::Method => Self::Method, - extension::SymbolKind::Property => Self::Property, - extension::SymbolKind::Field => Self::Field, - extension::SymbolKind::Constructor => Self::Constructor, - extension::SymbolKind::Enum => Self::Enum, 
- extension::SymbolKind::Interface => Self::Interface, - extension::SymbolKind::Function => Self::Function, - extension::SymbolKind::Variable => Self::Variable, - extension::SymbolKind::Constant => Self::Constant, - extension::SymbolKind::String => Self::String, - extension::SymbolKind::Number => Self::Number, - extension::SymbolKind::Boolean => Self::Boolean, - extension::SymbolKind::Array => Self::Array, - extension::SymbolKind::Object => Self::Object, - extension::SymbolKind::Key => Self::Key, - extension::SymbolKind::Null => Self::Null, - extension::SymbolKind::EnumMember => Self::EnumMember, - extension::SymbolKind::Struct => Self::Struct, - extension::SymbolKind::Event => Self::Event, - extension::SymbolKind::Operator => Self::Operator, - extension::SymbolKind::TypeParameter => Self::TypeParameter, - extension::SymbolKind::Other(value) => Self::Other(value), - } - } -} - -impl From for SlashCommand { - fn from(value: extension::SlashCommand) -> Self { - Self { - name: value.name, - description: value.description, - tooltip_text: value.tooltip_text, - requires_argument: value.requires_argument, - } - } -} - -impl From for extension::SlashCommandOutput { - fn from(value: SlashCommandOutput) -> Self { - Self { - text: value.text, - sections: value.sections.into_iter().map(Into::into).collect(), - } - } -} - -impl From for extension::SlashCommandOutputSection { - fn from(value: SlashCommandOutputSection) -> Self { - Self { - range: value.range.start as usize..value.range.end as usize, - label: value.label, - } - } -} - -impl From for extension::SlashCommandArgumentCompletion { - fn from(value: SlashCommandArgumentCompletion) -> Self { - Self { - label: value.label, - new_text: value.new_text, - run_command: value.run_command, - } - } -} - -impl TryFrom for extension::ContextServerConfiguration { - type Error = anyhow::Error; - - fn try_from(value: ContextServerConfiguration) -> Result { - let settings_schema: serde_json::Value = serde_json::from_str(&value.settings_schema) - .context("Failed to parse settings_schema")?; - - Ok(Self { - installation_instructions: value.installation_instructions, - default_settings: value.default_settings, - settings_schema, - }) - } -} - impl HostKeyValueStore for WasmState { async fn insert( &mut self, @@ -520,8 +114,7 @@ impl HostKeyValueStore for WasmState { key: String, value: String, ) -> wasmtime::Result> { - let kv_store = self.table.get(&kv_store)?; - kv_store.insert(key, value).await.to_wasmtime_result() + latest::HostKeyValueStore::insert(self, kv_store, key, value).await } async fn drop(&mut self, _worktree: Resource) -> Result<()> { @@ -535,8 +128,7 @@ impl HostProject for WasmState { &mut self, project: Resource, ) -> wasmtime::Result> { - let project = self.table.get(&project)?; - Ok(project.worktree_ids()) + latest::HostProject::worktree_ids(self, project).await } async fn drop(&mut self, _project: Resource) -> Result<()> { @@ -547,16 +139,14 @@ impl HostProject for WasmState { impl HostWorktree for WasmState { async fn id(&mut self, delegate: Resource>) -> wasmtime::Result { - let delegate = self.table.get(&delegate)?; - Ok(delegate.id()) + latest::HostWorktree::id(self, delegate).await } async fn root_path( &mut self, delegate: Resource>, ) -> wasmtime::Result { - let delegate = self.table.get(&delegate)?; - Ok(delegate.root_path()) + latest::HostWorktree::root_path(self, delegate).await } async fn read_text_file( @@ -564,19 +154,14 @@ impl HostWorktree for WasmState { delegate: Resource>, path: String, ) -> wasmtime::Result> { - let 
delegate = self.table.get(&delegate)?; - Ok(delegate - .read_text_file(&RelPath::new(Path::new(&path), PathStyle::Posix)?) - .await - .map_err(|error| error.to_string())) + latest::HostWorktree::read_text_file(self, delegate, path).await } async fn shell_env( &mut self, delegate: Resource>, ) -> wasmtime::Result { - let delegate = self.table.get(&delegate)?; - Ok(delegate.shell_env().await.into_iter().collect()) + latest::HostWorktree::shell_env(self, delegate).await } async fn which( @@ -584,8 +169,7 @@ impl HostWorktree for WasmState { delegate: Resource>, binary_name: String, ) -> wasmtime::Result> { - let delegate = self.table.get(&delegate)?; - Ok(delegate.which(binary_name).await) + latest::HostWorktree::which(self, delegate, binary_name).await } async fn drop(&mut self, _worktree: Resource) -> Result<()> { @@ -594,319 +178,6 @@ impl HostWorktree for WasmState { } } -impl common::Host for WasmState {} - -impl http_client::Host for WasmState { - async fn fetch( - &mut self, - request: http_client::HttpRequest, - ) -> wasmtime::Result> { - maybe!(async { - let url = &request.url; - let request = convert_request(&request)?; - let mut response = self.host.http_client.send(request).await?; - - if response.status().is_client_error() || response.status().is_server_error() { - bail!("failed to fetch '{url}': status code {}", response.status()) - } - convert_response(&mut response).await - }) - .await - .to_wasmtime_result() - } - - async fn fetch_stream( - &mut self, - request: http_client::HttpRequest, - ) -> wasmtime::Result, String>> { - let request = convert_request(&request)?; - let response = self.host.http_client.send(request); - maybe!(async { - let response = response.await?; - let stream = Arc::new(Mutex::new(response)); - let resource = self.table.push(stream)?; - Ok(resource) - }) - .await - .to_wasmtime_result() - } -} - -impl http_client::HostHttpResponseStream for WasmState { - async fn next_chunk( - &mut self, - resource: Resource, - ) -> wasmtime::Result>, String>> { - let stream = self.table.get(&resource)?.clone(); - maybe!(async move { - let mut response = stream.lock().await; - let mut buffer = vec![0; 8192]; // 8KB buffer - let bytes_read = response.body_mut().read(&mut buffer).await?; - if bytes_read == 0 { - Ok(None) - } else { - buffer.truncate(bytes_read); - Ok(Some(buffer)) - } - }) - .await - .to_wasmtime_result() - } - - async fn drop(&mut self, _resource: Resource) -> Result<()> { - Ok(()) - } -} - -impl From for ::http_client::Method { - fn from(value: http_client::HttpMethod) -> Self { - match value { - http_client::HttpMethod::Get => Self::GET, - http_client::HttpMethod::Post => Self::POST, - http_client::HttpMethod::Put => Self::PUT, - http_client::HttpMethod::Delete => Self::DELETE, - http_client::HttpMethod::Head => Self::HEAD, - http_client::HttpMethod::Options => Self::OPTIONS, - http_client::HttpMethod::Patch => Self::PATCH, - } - } -} - -fn convert_request( - extension_request: &http_client::HttpRequest, -) -> anyhow::Result<::http_client::Request> { - let mut request = ::http_client::Request::builder() - .method(::http_client::Method::from(extension_request.method)) - .uri(&extension_request.url) - .follow_redirects(match extension_request.redirect_policy { - http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, - http_client::RedirectPolicy::FollowLimit(limit) => { - ::http_client::RedirectPolicy::FollowLimit(limit) - } - http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, - }); - for (key, 
value) in &extension_request.headers { - request = request.header(key, value); - } - let body = extension_request - .body - .clone() - .map(AsyncBody::from) - .unwrap_or_default(); - request.body(body).map_err(anyhow::Error::from) -} - -async fn convert_response( - response: &mut ::http_client::Response, -) -> anyhow::Result { - let mut extension_response = http_client::HttpResponse { - body: Vec::new(), - headers: Vec::new(), - }; - - for (key, value) in response.headers() { - extension_response - .headers - .push((key.to_string(), value.to_str().unwrap_or("").to_string())); - } - - response - .body_mut() - .read_to_end(&mut extension_response.body) - .await?; - - Ok(extension_response) -} - -impl nodejs::Host for WasmState { - async fn node_binary_path(&mut self) -> wasmtime::Result> { - self.host - .node_runtime - .binary_path() - .await - .map(|path| path.to_string_lossy().into_owned()) - .to_wasmtime_result() - } - - async fn npm_package_latest_version( - &mut self, - package_name: String, - ) -> wasmtime::Result> { - self.host - .node_runtime - .npm_package_latest_version(&package_name) - .await - .to_wasmtime_result() - } - - async fn npm_package_installed_version( - &mut self, - package_name: String, - ) -> wasmtime::Result, String>> { - self.host - .node_runtime - .npm_package_installed_version(&self.work_dir(), &package_name) - .await - .to_wasmtime_result() - } - - async fn npm_install_package( - &mut self, - package_name: String, - version: String, - ) -> wasmtime::Result> { - self.capability_granter - .grant_npm_install_package(&package_name)?; - - self.host - .node_runtime - .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) - .await - .to_wasmtime_result() - } -} - -#[async_trait] -impl lsp::Host for WasmState {} - -impl From<::http_client::github::GithubRelease> for github::GithubRelease { - fn from(value: ::http_client::github::GithubRelease) -> Self { - Self { - version: value.tag_name, - assets: value.assets.into_iter().map(Into::into).collect(), - } - } -} - -impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { - fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { - Self { - name: value.name, - download_url: value.browser_download_url, - } - } -} - -impl github::Host for WasmState { - async fn latest_github_release( - &mut self, - repo: String, - options: github::GithubReleaseOptions, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::latest_github_release( - &repo, - options.require_assets, - options.pre_release, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } - - async fn github_release_by_tag_name( - &mut self, - repo: String, - tag: String, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::get_release_by_tag_name( - &repo, - &tag, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } -} - -impl platform::Host for WasmState { - async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { - Ok(( - match env::consts::OS { - "macos" => platform::Os::Mac, - "linux" => platform::Os::Linux, - "windows" => platform::Os::Windows, - _ => panic!("unsupported os"), - }, - match env::consts::ARCH { - "aarch64" => platform::Architecture::Aarch64, - "x86" => platform::Architecture::X86, - "x86_64" => platform::Architecture::X8664, - _ => panic!("unsupported architecture"), - }, - )) - } -} - -impl From 
for process::Output { - fn from(output: std::process::Output) -> Self { - Self { - status: output.status.code(), - stdout: output.stdout, - stderr: output.stderr, - } - } -} - -impl process::Host for WasmState { - async fn run_command( - &mut self, - command: process::Command, - ) -> wasmtime::Result> { - maybe!(async { - self.capability_granter - .grant_exec(&command.command, &command.args)?; - - let output = util::command::new_smol_command(command.command.as_str()) - .args(&command.args) - .envs(command.env) - .output() - .await?; - - Ok(output.into()) - }) - .await - .to_wasmtime_result() - } -} - -#[async_trait] -impl slash_command::Host for WasmState {} - -#[async_trait] -impl context_server::Host for WasmState {} - -impl dap::Host for WasmState { - async fn resolve_tcp_template( - &mut self, - template: TcpArgumentsTemplate, - ) -> wasmtime::Result> { - maybe!(async { - let (host, port, timeout) = - ::dap::configure_tcp_connection(task::TcpArgumentsTemplate { - port: template.port, - host: template.host.map(Ipv4Addr::from_bits), - timeout: template.timeout, - }) - .await?; - Ok(TcpArguments { - port, - host: host.to_bits(), - timeout, - }) - }) - .await - .to_wasmtime_result() - } -} - impl ExtensionImports for WasmState { async fn get_settings( &mut self, @@ -914,93 +185,13 @@ impl ExtensionImports for WasmState { category: String, key: Option, ) -> wasmtime::Result> { - self.on_main_thread(|cx| { - async move { - let path = location.as_ref().and_then(|location| { - RelPath::new(Path::new(&location.path), PathStyle::Posix).ok() - }); - let location = path - .as_ref() - .zip(location.as_ref()) - .map(|(path, location)| ::settings::SettingsLocation { - worktree_id: WorktreeId::from_proto(location.worktree_id), - path, - }); - - cx.update(|cx| match category.as_str() { - "language" => { - let key = key.map(|k| LanguageName::new(&k)); - let settings = AllLanguageSettings::get(location, cx).language( - location, - key.as_ref(), - cx, - ); - Ok(serde_json::to_string(&settings::LanguageSettings { - tab_size: settings.tab_size, - })?) - } - "lsp" => { - let settings = key - .and_then(|key| { - ProjectSettings::get(location, cx) - .lsp - .get(&::lsp::LanguageServerName::from_proto(key)) - }) - .cloned() - .unwrap_or_default(); - Ok(serde_json::to_string(&settings::LspSettings { - binary: settings.binary.map(|binary| settings::CommandSettings { - path: binary.path, - arguments: binary.arguments, - env: binary.env.map(|env| env.into_iter().collect()), - }), - settings: settings.settings, - initialization_options: settings.initialization_options, - })?) 
- } - "context_servers" => { - let settings = key - .and_then(|key| { - ProjectSettings::get(location, cx) - .context_servers - .get(key.as_str()) - }) - .cloned() - .unwrap_or_else(|| { - project::project_settings::ContextServerSettings::default_extension( - ) - }); - - match settings { - project::project_settings::ContextServerSettings::Custom { - enabled: _, - command, - } => Ok(serde_json::to_string(&settings::ContextServerSettings { - command: Some(settings::CommandSettings { - path: command.path.to_str().map(|path| path.to_string()), - arguments: Some(command.args), - env: command.env.map(|env| env.into_iter().collect()), - }), - settings: None, - })?), - project::project_settings::ContextServerSettings::Extension { - enabled: _, - settings, - } => Ok(serde_json::to_string(&settings::ContextServerSettings { - command: None, - settings: Some(settings), - })?), - } - } - _ => { - bail!("Unknown settings category: {}", category); - } - }) - } - .boxed_local() - }) - .await? - .to_wasmtime_result() + latest::ExtensionImports::get_settings( + self, + location.map(|location| location.into()), + category, + key, + ) + .await } async fn set_language_server_installation_status( @@ -1008,18 +199,12 @@ impl ExtensionImports for WasmState { server_name: String, status: LanguageServerInstallationStatus, ) -> wasmtime::Result<()> { - let status = match status { - LanguageServerInstallationStatus::CheckingForUpdate => BinaryStatus::CheckingForUpdate, - LanguageServerInstallationStatus::Downloading => BinaryStatus::Downloading, - LanguageServerInstallationStatus::None => BinaryStatus::None, - LanguageServerInstallationStatus::Failed(error) => BinaryStatus::Failed { error }, - }; - - self.host - .proxy - .update_language_server_status(::lsp::LanguageServerName(server_name.into()), status); - - Ok(()) + latest::ExtensionImports::set_language_server_installation_status( + self, + server_name, + status.into(), + ) + .await } async fn download_file( @@ -1028,79 +213,10 @@ impl ExtensionImports for WasmState { path: String, file_type: DownloadedFileType, ) -> wasmtime::Result> { - maybe!(async { - let parsed_url = Url::parse(&url)?; - self.capability_granter.grant_download_file(&parsed_url)?; - - let path = PathBuf::from(path); - let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref()); - - self.host.fs.create_dir(&extension_work_dir).await?; - - let destination_path = self - .host - .writeable_path_from_extension(&self.manifest.id, &path)?; - - let mut response = self - .host - .http_client - .get(&url, Default::default(), true) - .await - .context("downloading release")?; - - anyhow::ensure!( - response.status().is_success(), - "download failed with status {}", - response.status() - ); - let body = BufReader::new(response.body_mut()); - - match file_type { - DownloadedFileType::Uncompressed => { - futures::pin_mut!(body); - self.host - .fs - .create_file_with(&destination_path, body) - .await?; - } - DownloadedFileType::Gzip => { - let body = GzipDecoder::new(body); - futures::pin_mut!(body); - self.host - .fs - .create_file_with(&destination_path, body) - .await?; - } - DownloadedFileType::GzipTar => { - let body = GzipDecoder::new(body); - futures::pin_mut!(body); - self.host - .fs - .extract_tar_file(&destination_path, Archive::new(body)) - .await?; - } - DownloadedFileType::Zip => { - futures::pin_mut!(body); - extract_zip(&destination_path, body) - .await - .with_context(|| format!("unzipping {path:?} archive"))?; - } - } - - Ok(()) - }) - .await - .to_wasmtime_result() + 
latest::ExtensionImports::download_file(self, url, path, file_type.into()).await } async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { - let path = self - .host - .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?; - - make_file_executable(&path) - .await - .with_context(|| format!("setting permissions for path {path:?}")) - .to_wasmtime_result() + latest::ExtensionImports::make_file_executable(self, path).await } } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs new file mode 100644 index 0000000000000000000000000000000000000000..8b3f8e86b71e959eade1e5d3710ce66b5b2d3008 --- /dev/null +++ b/crates/extension_host/src/wasm_host/wit/since_v0_8_0.rs @@ -0,0 +1,1111 @@ +use crate::wasm_host::wit::since_v0_6_0::{ + dap::{ + BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, StartDebuggingRequestArguments, + TcpArguments, TcpArgumentsTemplate, + }, + slash_command::SlashCommandOutputSection, +}; +use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFormat, SymbolKind}; +use crate::wasm_host::{WasmState, wit::ToWasmtimeResult}; +use ::http_client::{AsyncBody, HttpRequestExt}; +use ::settings::{Settings, WorktreeId}; +use anyhow::{Context as _, Result, bail}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use async_trait::async_trait; +use extension::{ + ExtensionLanguageServerProxy, KeyValueStoreDelegate, ProjectDelegate, WorktreeDelegate, +}; +use futures::{AsyncReadExt, lock::Mutex}; +use futures::{FutureExt as _, io::BufReader}; +use gpui::{BackgroundExecutor, SharedString}; +use language::{BinaryStatus, LanguageName, language_settings::AllLanguageSettings}; +use project::project_settings::ProjectSettings; +use semver::Version; +use std::{ + env, + net::Ipv4Addr, + path::{Path, PathBuf}, + str::FromStr, + sync::{Arc, OnceLock}, +}; +use task::{SpawnInTerminal, ZedDebugConfig}; +use url::Url; +use util::{ + archive::extract_zip, fs::make_file_executable, maybe, paths::PathStyle, rel_path::RelPath, +}; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: Version = Version::new(0, 8, 0); +pub const MAX_VERSION: Version = Version::new(0, 8, 0); + +wasmtime::component::bindgen!({ + async: true, + trappable_imports: true, + path: "../extension_api/wit/since_v0.8.0", + with: { + "worktree": ExtensionWorktree, + "project": ExtensionProject, + "key-value-store": ExtensionKeyValueStore, + "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + }, +}); + +pub use self::zed::extension::*; + +mod settings { + #![allow(dead_code)] + include!(concat!(env!("OUT_DIR"), "/since_v0.8.0/settings.rs")); +} + +pub type ExtensionWorktree = Arc; +pub type ExtensionProject = Arc; +pub type ExtensionKeyValueStore = Arc; +pub type ExtensionHttpResponseStream = Arc>>; + +pub fn linker(executor: &BackgroundExecutor) -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(executor, Extension::add_to_linker)) +} + +impl From for std::ops::Range { + fn from(range: Range) -> Self { + let start = range.start as usize; + let end = range.end as usize; + start..end + } +} + +impl From for extension::Command { + fn from(value: Command) -> Self { + Self { + command: value.command.into(), + args: value.args, + env: value.env, + } + } +} + +impl From + for extension::StartDebuggingRequestArgumentsRequest +{ + fn from(value: 
StartDebuggingRequestArgumentsRequest) -> Self { + match value { + StartDebuggingRequestArgumentsRequest::Launch => Self::Launch, + StartDebuggingRequestArgumentsRequest::Attach => Self::Attach, + } + } +} +impl TryFrom for extension::StartDebuggingRequestArguments { + type Error = anyhow::Error; + + fn try_from(value: StartDebuggingRequestArguments) -> Result { + Ok(Self { + configuration: serde_json::from_str(&value.configuration)?, + request: value.request.into(), + }) + } +} +impl From for extension::TcpArguments { + fn from(value: TcpArguments) -> Self { + Self { + host: value.host.into(), + port: value.port, + timeout: value.timeout, + } + } +} + +impl From for TcpArgumentsTemplate { + fn from(value: extension::TcpArgumentsTemplate) -> Self { + Self { + host: value.host.map(Ipv4Addr::to_bits), + port: value.port, + timeout: value.timeout, + } + } +} + +impl From for extension::TcpArgumentsTemplate { + fn from(value: TcpArgumentsTemplate) -> Self { + Self { + host: value.host.map(Ipv4Addr::from_bits), + port: value.port, + timeout: value.timeout, + } + } +} + +impl TryFrom for DebugTaskDefinition { + type Error = anyhow::Error; + fn try_from(value: extension::DebugTaskDefinition) -> Result { + Ok(Self { + label: value.label.to_string(), + adapter: value.adapter.to_string(), + config: value.config.to_string(), + tcp_connection: value.tcp_connection.map(Into::into), + }) + } +} + +impl From for DebugRequest { + fn from(value: task::DebugRequest) -> Self { + match value { + task::DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()), + task::DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()), + } + } +} + +impl From for task::DebugRequest { + fn from(value: DebugRequest) -> Self { + match value { + DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()), + DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()), + } + } +} + +impl From for LaunchRequest { + fn from(value: task::LaunchRequest) -> Self { + Self { + program: value.program, + cwd: value.cwd.map(|p| p.to_string_lossy().into_owned()), + args: value.args, + envs: value.env.into_iter().collect(), + } + } +} + +impl From for AttachRequest { + fn from(value: task::AttachRequest) -> Self { + Self { + process_id: value.process_id, + } + } +} + +impl From for task::LaunchRequest { + fn from(value: LaunchRequest) -> Self { + Self { + program: value.program, + cwd: value.cwd.map(|p| p.into()), + args: value.args, + env: value.envs.into_iter().collect(), + } + } +} +impl From for task::AttachRequest { + fn from(value: AttachRequest) -> Self { + Self { + process_id: value.process_id, + } + } +} + +impl From for DebugConfig { + fn from(value: ZedDebugConfig) -> Self { + Self { + label: value.label.into(), + adapter: value.adapter.into(), + request: value.request.into(), + stop_on_entry: value.stop_on_entry, + } + } +} +impl TryFrom for extension::DebugAdapterBinary { + type Error = anyhow::Error; + fn try_from(value: DebugAdapterBinary) -> Result { + Ok(Self { + command: value.command, + arguments: value.arguments, + envs: value.envs.into_iter().collect(), + cwd: value.cwd.map(|s| s.into()), + connection: value.connection.map(Into::into), + request_args: value.request_args.try_into()?, + }) + } +} + +impl From for extension::BuildTaskDefinition { + fn from(value: BuildTaskDefinition) -> Self { + match value { + BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), + BuildTaskDefinition::Template(build_task_template) => Self::Template { + 
task_template: build_task_template.template.into(), + locator_name: build_task_template.locator_name.map(SharedString::from), + }, + } + } +} + +impl From for BuildTaskDefinition { + fn from(value: extension::BuildTaskDefinition) -> Self { + match value { + extension::BuildTaskDefinition::ByName(name) => Self::ByName(name.into()), + extension::BuildTaskDefinition::Template { + task_template, + locator_name, + } => Self::Template(BuildTaskDefinitionTemplatePayload { + template: task_template.into(), + locator_name: locator_name.map(String::from), + }), + } + } +} +impl From for extension::BuildTaskTemplate { + fn from(value: BuildTaskTemplate) -> Self { + Self { + label: value.label, + command: value.command, + args: value.args, + env: value.env.into_iter().collect(), + cwd: value.cwd, + ..Default::default() + } + } +} +impl From for BuildTaskTemplate { + fn from(value: extension::BuildTaskTemplate) -> Self { + Self { + label: value.label, + command: value.command, + args: value.args, + env: value.env.into_iter().collect(), + cwd: value.cwd, + } + } +} + +impl TryFrom for extension::DebugScenario { + type Error = anyhow::Error; + + fn try_from(value: DebugScenario) -> std::result::Result { + Ok(Self { + adapter: value.adapter.into(), + label: value.label.into(), + build: value.build.map(Into::into), + config: serde_json::Value::from_str(&value.config)?, + tcp_connection: value.tcp_connection.map(Into::into), + }) + } +} + +impl From for DebugScenario { + fn from(value: extension::DebugScenario) -> Self { + Self { + adapter: value.adapter.into(), + label: value.label.into(), + build: value.build.map(Into::into), + config: value.config.to_string(), + tcp_connection: value.tcp_connection.map(Into::into), + } + } +} + +impl TryFrom for ResolvedTask { + type Error = anyhow::Error; + + fn try_from(value: SpawnInTerminal) -> Result { + Ok(Self { + label: value.label, + command: value.command.context("missing command")?, + args: value.args, + env: value.env.into_iter().collect(), + cwd: value.cwd.map(|s| { + let s = s.to_string_lossy(); + if cfg!(target_os = "windows") { + s.replace('\\', "/") + } else { + s.into_owned() + } + }), + }) + } +} + +impl From for extension::CodeLabel { + fn from(value: CodeLabel) -> Self { + Self { + code: value.code, + spans: value.spans.into_iter().map(Into::into).collect(), + filter_range: value.filter_range.into(), + } + } +} + +impl From for extension::CodeLabelSpan { + fn from(value: CodeLabelSpan) -> Self { + match value { + CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()), + CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()), + } + } +} + +impl From for extension::CodeLabelSpanLiteral { + fn from(value: CodeLabelSpanLiteral) -> Self { + Self { + text: value.text, + highlight_name: value.highlight_name, + } + } +} + +impl From for Completion { + fn from(value: extension::Completion) -> Self { + Self { + label: value.label, + label_details: value.label_details.map(Into::into), + detail: value.detail, + kind: value.kind.map(Into::into), + insert_text_format: value.insert_text_format.map(Into::into), + } + } +} + +impl From for CompletionLabelDetails { + fn from(value: extension::CompletionLabelDetails) -> Self { + Self { + detail: value.detail, + description: value.description, + } + } +} + +impl From for CompletionKind { + fn from(value: extension::CompletionKind) -> Self { + match value { + extension::CompletionKind::Text => Self::Text, + extension::CompletionKind::Method => Self::Method, + extension::CompletionKind::Function => 
Self::Function, + extension::CompletionKind::Constructor => Self::Constructor, + extension::CompletionKind::Field => Self::Field, + extension::CompletionKind::Variable => Self::Variable, + extension::CompletionKind::Class => Self::Class, + extension::CompletionKind::Interface => Self::Interface, + extension::CompletionKind::Module => Self::Module, + extension::CompletionKind::Property => Self::Property, + extension::CompletionKind::Unit => Self::Unit, + extension::CompletionKind::Value => Self::Value, + extension::CompletionKind::Enum => Self::Enum, + extension::CompletionKind::Keyword => Self::Keyword, + extension::CompletionKind::Snippet => Self::Snippet, + extension::CompletionKind::Color => Self::Color, + extension::CompletionKind::File => Self::File, + extension::CompletionKind::Reference => Self::Reference, + extension::CompletionKind::Folder => Self::Folder, + extension::CompletionKind::EnumMember => Self::EnumMember, + extension::CompletionKind::Constant => Self::Constant, + extension::CompletionKind::Struct => Self::Struct, + extension::CompletionKind::Event => Self::Event, + extension::CompletionKind::Operator => Self::Operator, + extension::CompletionKind::TypeParameter => Self::TypeParameter, + extension::CompletionKind::Other(value) => Self::Other(value), + } + } +} + +impl From for InsertTextFormat { + fn from(value: extension::InsertTextFormat) -> Self { + match value { + extension::InsertTextFormat::PlainText => Self::PlainText, + extension::InsertTextFormat::Snippet => Self::Snippet, + extension::InsertTextFormat::Other(value) => Self::Other(value), + } + } +} + +impl From for Symbol { + fn from(value: extension::Symbol) -> Self { + Self { + kind: value.kind.into(), + name: value.name, + } + } +} + +impl From for SymbolKind { + fn from(value: extension::SymbolKind) -> Self { + match value { + extension::SymbolKind::File => Self::File, + extension::SymbolKind::Module => Self::Module, + extension::SymbolKind::Namespace => Self::Namespace, + extension::SymbolKind::Package => Self::Package, + extension::SymbolKind::Class => Self::Class, + extension::SymbolKind::Method => Self::Method, + extension::SymbolKind::Property => Self::Property, + extension::SymbolKind::Field => Self::Field, + extension::SymbolKind::Constructor => Self::Constructor, + extension::SymbolKind::Enum => Self::Enum, + extension::SymbolKind::Interface => Self::Interface, + extension::SymbolKind::Function => Self::Function, + extension::SymbolKind::Variable => Self::Variable, + extension::SymbolKind::Constant => Self::Constant, + extension::SymbolKind::String => Self::String, + extension::SymbolKind::Number => Self::Number, + extension::SymbolKind::Boolean => Self::Boolean, + extension::SymbolKind::Array => Self::Array, + extension::SymbolKind::Object => Self::Object, + extension::SymbolKind::Key => Self::Key, + extension::SymbolKind::Null => Self::Null, + extension::SymbolKind::EnumMember => Self::EnumMember, + extension::SymbolKind::Struct => Self::Struct, + extension::SymbolKind::Event => Self::Event, + extension::SymbolKind::Operator => Self::Operator, + extension::SymbolKind::TypeParameter => Self::TypeParameter, + extension::SymbolKind::Other(value) => Self::Other(value), + } + } +} + +impl From for SlashCommand { + fn from(value: extension::SlashCommand) -> Self { + Self { + name: value.name, + description: value.description, + tooltip_text: value.tooltip_text, + requires_argument: value.requires_argument, + } + } +} + +impl From for extension::SlashCommandOutput { + fn from(value: SlashCommandOutput) 
-> Self { + Self { + text: value.text, + sections: value.sections.into_iter().map(Into::into).collect(), + } + } +} + +impl From for extension::SlashCommandOutputSection { + fn from(value: SlashCommandOutputSection) -> Self { + Self { + range: value.range.start as usize..value.range.end as usize, + label: value.label, + } + } +} + +impl From for extension::SlashCommandArgumentCompletion { + fn from(value: SlashCommandArgumentCompletion) -> Self { + Self { + label: value.label, + new_text: value.new_text, + run_command: value.run_command, + } + } +} + +impl TryFrom for extension::ContextServerConfiguration { + type Error = anyhow::Error; + + fn try_from(value: ContextServerConfiguration) -> Result { + let settings_schema: serde_json::Value = serde_json::from_str(&value.settings_schema) + .context("Failed to parse settings_schema")?; + + Ok(Self { + installation_instructions: value.installation_instructions, + default_settings: value.default_settings, + settings_schema, + }) + } +} + +impl HostKeyValueStore for WasmState { + async fn insert( + &mut self, + kv_store: Resource, + key: String, + value: String, + ) -> wasmtime::Result> { + let kv_store = self.table.get(&kv_store)?; + kv_store.insert(key, value).await.to_wasmtime_result() + } + + async fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of key-value stores. + Ok(()) + } +} + +impl HostProject for WasmState { + async fn worktree_ids( + &mut self, + project: Resource, + ) -> wasmtime::Result> { + let project = self.table.get(&project)?; + Ok(project.worktree_ids()) + } + + async fn drop(&mut self, _project: Resource) -> Result<()> { + // We only ever hand out borrows of projects. + Ok(()) + } +} + +impl HostWorktree for WasmState { + async fn id(&mut self, delegate: Resource>) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.id()) + } + + async fn root_path( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.root_path()) + } + + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .read_text_file(&RelPath::new(Path::new(&path), PathStyle::Posix)?) + .await + .map_err(|error| error.to_string())) + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.shell_env().await.into_iter().collect()) + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate.which(binary_name).await) + } + + async fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of worktrees. 
+ Ok(()) + } +} + +impl common::Host for WasmState {} + +impl http_client::Host for WasmState { + async fn fetch( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result> { + maybe!(async { + let url = &request.url; + let request = convert_request(&request)?; + let mut response = self.host.http_client.send(request).await?; + + if response.status().is_client_error() || response.status().is_server_error() { + bail!("failed to fetch '{url}': status code {}", response.status()) + } + convert_response(&mut response).await + }) + .await + .to_wasmtime_result() + } + + async fn fetch_stream( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result, String>> { + let request = convert_request(&request)?; + let response = self.host.http_client.send(request); + maybe!(async { + let response = response.await?; + let stream = Arc::new(Mutex::new(response)); + let resource = self.table.push(stream)?; + Ok(resource) + }) + .await + .to_wasmtime_result() + } +} + +impl http_client::HostHttpResponseStream for WasmState { + async fn next_chunk( + &mut self, + resource: Resource, + ) -> wasmtime::Result>, String>> { + let stream = self.table.get(&resource)?.clone(); + maybe!(async move { + let mut response = stream.lock().await; + let mut buffer = vec![0; 8192]; // 8KB buffer + let bytes_read = response.body_mut().read(&mut buffer).await?; + if bytes_read == 0 { + Ok(None) + } else { + buffer.truncate(bytes_read); + Ok(Some(buffer)) + } + }) + .await + .to_wasmtime_result() + } + + async fn drop(&mut self, _resource: Resource) -> Result<()> { + Ok(()) + } +} + +impl From for ::http_client::Method { + fn from(value: http_client::HttpMethod) -> Self { + match value { + http_client::HttpMethod::Get => Self::GET, + http_client::HttpMethod::Post => Self::POST, + http_client::HttpMethod::Put => Self::PUT, + http_client::HttpMethod::Delete => Self::DELETE, + http_client::HttpMethod::Head => Self::HEAD, + http_client::HttpMethod::Options => Self::OPTIONS, + http_client::HttpMethod::Patch => Self::PATCH, + } + } +} + +fn convert_request( + extension_request: &http_client::HttpRequest, +) -> anyhow::Result<::http_client::Request> { + let mut request = ::http_client::Request::builder() + .method(::http_client::Method::from(extension_request.method)) + .uri(&extension_request.url) + .follow_redirects(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, + http_client::RedirectPolicy::FollowLimit(limit) => { + ::http_client::RedirectPolicy::FollowLimit(limit) + } + http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, + }); + for (key, value) in &extension_request.headers { + request = request.header(key, value); + } + let body = extension_request + .body + .clone() + .map(AsyncBody::from) + .unwrap_or_default(); + request.body(body).map_err(anyhow::Error::from) +} + +async fn convert_response( + response: &mut ::http_client::Response, +) -> anyhow::Result { + let mut extension_response = http_client::HttpResponse { + body: Vec::new(), + headers: Vec::new(), + }; + + for (key, value) in response.headers() { + extension_response + .headers + .push((key.to_string(), value.to_str().unwrap_or("").to_string())); + } + + response + .body_mut() + .read_to_end(&mut extension_response.body) + .await?; + + Ok(extension_response) +} + +impl nodejs::Host for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + self.host + .node_runtime + .binary_path() + .await + .map(|path| 
path.to_string_lossy().into_owned()) + .to_wasmtime_result() + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_package_latest_version(&package_name) + .await + .map(|v| v.to_string()) + .to_wasmtime_result() + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + self.host + .node_runtime + .npm_package_installed_version(&self.work_dir(), &package_name) + .await + .map(|option| option.map(|version| version.to_string())) + .to_wasmtime_result() + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + self.capability_granter + .grant_npm_install_package(&package_name)?; + + self.host + .node_runtime + .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl lsp::Host for WasmState {} + +impl From<::http_client::github::GithubRelease> for github::GithubRelease { + fn from(value: ::http_client::github::GithubRelease) -> Self { + Self { + version: value.tag_name, + assets: value.assets.into_iter().map(Into::into).collect(), + } + } +} + +impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { + fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { + Self { + name: value.name, + download_url: value.browser_download_url, + } + } +} + +impl github::Host for WasmState { + async fn latest_github_release( + &mut self, + repo: String, + options: github::GithubReleaseOptions, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::latest_github_release( + &repo, + options.require_assets, + options.pre_release, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } + + async fn github_release_by_tag_name( + &mut self, + repo: String, + tag: String, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::get_release_by_tag_name( + &repo, + &tag, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } +} + +impl platform::Host for WasmState { + async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { + Ok(( + match env::consts::OS { + "macos" => platform::Os::Mac, + "linux" => platform::Os::Linux, + "windows" => platform::Os::Windows, + _ => panic!("unsupported os"), + }, + match env::consts::ARCH { + "aarch64" => platform::Architecture::Aarch64, + "x86" => platform::Architecture::X86, + "x86_64" => platform::Architecture::X8664, + _ => panic!("unsupported architecture"), + }, + )) + } +} + +impl From for process::Output { + fn from(output: std::process::Output) -> Self { + Self { + status: output.status.code(), + stdout: output.stdout, + stderr: output.stderr, + } + } +} + +impl process::Host for WasmState { + async fn run_command( + &mut self, + command: process::Command, + ) -> wasmtime::Result> { + maybe!(async { + self.capability_granter + .grant_exec(&command.command, &command.args)?; + + let output = util::command::new_smol_command(command.command.as_str()) + .args(&command.args) + .envs(command.env) + .output() + .await?; + + Ok(output.into()) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl slash_command::Host for WasmState {} + +#[async_trait] +impl context_server::Host for WasmState {} + +impl dap::Host for WasmState { + async fn 
resolve_tcp_template(
+        &mut self,
+        template: TcpArgumentsTemplate,
+    ) -> wasmtime::Result<Result<TcpArguments, String>> {
+        maybe!(async {
+            let (host, port, timeout) =
+                ::dap::configure_tcp_connection(task::TcpArgumentsTemplate {
+                    port: template.port,
+                    host: template.host.map(Ipv4Addr::from_bits),
+                    timeout: template.timeout,
+                })
+                .await?;
+            Ok(TcpArguments {
+                port,
+                host: host.to_bits(),
+                timeout,
+            })
+        })
+        .await
+        .to_wasmtime_result()
+    }
+}
+
+impl ExtensionImports for WasmState {
+    async fn get_settings(
+        &mut self,
+        location: Option<self::SettingsLocation>,
+        category: String,
+        key: Option<String>,
+    ) -> wasmtime::Result<Result<String, String>> {
+        self.on_main_thread(|cx| {
+            async move {
+                let path = location.as_ref().and_then(|location| {
+                    RelPath::new(Path::new(&location.path), PathStyle::Posix).ok()
+                });
+                let location = path
+                    .as_ref()
+                    .zip(location.as_ref())
+                    .map(|(path, location)| ::settings::SettingsLocation {
+                        worktree_id: WorktreeId::from_proto(location.worktree_id),
+                        path,
+                    });
+
+                cx.update(|cx| match category.as_str() {
+                    "language" => {
+                        let key = key.map(|k| LanguageName::new(&k));
+                        let settings = AllLanguageSettings::get(location, cx).language(
+                            location,
+                            key.as_ref(),
+                            cx,
+                        );
+                        Ok(serde_json::to_string(&settings::LanguageSettings {
+                            tab_size: settings.tab_size,
+                        })?)
+                    }
+                    "lsp" => {
+                        let settings = key
+                            .and_then(|key| {
+                                ProjectSettings::get(location, cx)
+                                    .lsp
+                                    .get(&::lsp::LanguageServerName::from_proto(key))
+                            })
+                            .cloned()
+                            .unwrap_or_default();
+                        Ok(serde_json::to_string(&settings::LspSettings {
+                            binary: settings.binary.map(|binary| settings::CommandSettings {
+                                path: binary.path,
+                                arguments: binary.arguments,
+                                env: binary.env.map(|env| env.into_iter().collect()),
+                            }),
+                            settings: settings.settings,
+                            initialization_options: settings.initialization_options,
+                        })?)
+                    }
+                    "context_servers" => {
+                        let settings = key
+                            .and_then(|key| {
+                                ProjectSettings::get(location, cx)
+                                    .context_servers
+                                    .get(key.as_str())
+                            })
+                            .cloned()
+                            .unwrap_or_else(|| {
+                                project::project_settings::ContextServerSettings::default_extension(
+                                )
+                            });
+
+                        match settings {
+                            project::project_settings::ContextServerSettings::Stdio {
+                                enabled: _,
+                                command,
+                            } => Ok(serde_json::to_string(&settings::ContextServerSettings {
+                                command: Some(settings::CommandSettings {
+                                    path: command.path.to_str().map(|path| path.to_string()),
+                                    arguments: Some(command.args),
+                                    env: command.env.map(|env| env.into_iter().collect()),
+                                }),
+                                settings: None,
+                            })?),
+                            project::project_settings::ContextServerSettings::Extension {
+                                enabled: _,
+                                settings,
+                            } => Ok(serde_json::to_string(&settings::ContextServerSettings {
+                                command: None,
+                                settings: Some(settings),
+                            })?),
+                            project::project_settings::ContextServerSettings::Http { .. } => {
+                                bail!("remote context server settings not supported in 0.8.0")
+                            }
+                        }
+                    }
+                    _ => {
+                        bail!("Unknown settings category: {}", category);
+                    }
+                })
+            }
+            .boxed_local()
+        })
+        .await?
+ .to_wasmtime_result() + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + let status = match status { + LanguageServerInstallationStatus::CheckingForUpdate => BinaryStatus::CheckingForUpdate, + LanguageServerInstallationStatus::Downloading => BinaryStatus::Downloading, + LanguageServerInstallationStatus::None => BinaryStatus::None, + LanguageServerInstallationStatus::Failed(error) => BinaryStatus::Failed { error }, + }; + + self.host + .proxy + .update_language_server_status(::lsp::LanguageServerName(server_name.into()), status); + + Ok(()) + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + maybe!(async { + let parsed_url = Url::parse(&url)?; + self.capability_granter.grant_download_file(&parsed_url)?; + + let path = PathBuf::from(path); + let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref()); + + self.host.fs.create_dir(&extension_work_dir).await?; + + let destination_path = self + .host + .writeable_path_from_extension(&self.manifest.id, &path)?; + + let mut response = self + .host + .http_client + .get(&url, Default::default(), true) + .await + .context("downloading release")?; + + anyhow::ensure!( + response.status().is_success(), + "download failed with status {}", + response.status() + ); + let body = BufReader::new(response.body_mut()); + + match file_type { + DownloadedFileType::Uncompressed => { + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::Gzip => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::GzipTar => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .extract_tar_file(&destination_path, Archive::new(body)) + .await?; + } + DownloadedFileType::Zip => { + futures::pin_mut!(body); + extract_zip(&destination_path, body) + .await + .with_context(|| format!("unzipping {path:?} archive"))?; + } + } + + Ok(()) + }) + .await + .to_wasmtime_result() + } + + async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { + let path = self + .host + .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?; + + make_file_executable(&path) + .await + .with_context(|| format!("setting permissions for path {path:?}")) + .to_wasmtime_result() + } +} diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml index 87c76b684725dd9f88031d70c67bff76670cdcf5..707938a9eb83209149a261fbe4c8cf9d6ca4e91e 100644 --- a/crates/extensions_ui/Cargo.toml +++ b/crates/extensions_ui/Cargo.toml @@ -28,7 +28,7 @@ num-format.workspace = true picker.workspace = true project.workspace = true release_channel.workspace = true -semantic_version.workspace = true +semver.workspace = true serde.workspace = true settings.workspace = true smallvec.workspace = true diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index 5dcd1e210527ee89a35a3b89008a901cf1f9f036..7ad4c1540a419f0cdeedb2aeff7661aafac5ef4c 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -75,6 +75,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ ("vue", &["vue"]), ("wgsl", &["wgsl"]), ("wit", &["wit"]), + ("xml", &["xml"]), 
("zig", &["zig"]), ]; diff --git a/crates/extensions_ui/src/extension_version_selector.rs b/crates/extensions_ui/src/extension_version_selector.rs index d38c27375f6c32324d4832d308768af8473869eb..17d293da76d4831e30af8ed7d26d2df4c7706716 100644 --- a/crates/extensions_ui/src/extension_version_selector.rs +++ b/crates/extensions_ui/src/extension_version_selector.rs @@ -8,7 +8,7 @@ use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{App, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, prelude::*}; use picker::{Picker, PickerDelegate}; use release_channel::ReleaseChannel; -use semantic_version::SemanticVersion; +use semver::Version; use settings::update_settings_file; use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*}; use util::ResultExt; @@ -60,8 +60,8 @@ impl ExtensionVersionSelectorDelegate { mut extension_versions: Vec, ) -> Self { extension_versions.sort_unstable_by(|a, b| { - let a_version = SemanticVersion::from_str(&a.manifest.version); - let b_version = SemanticVersion::from_str(&b.manifest.version); + let a_version = Version::from_str(&a.manifest.version); + let b_version = Version::from_str(&b.manifest.version); match (a_version, b_version) { (Ok(a_version), Ok(b_version)) => b_version.cmp(&a_version), diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 3a7e1a80dd348d97a54f1dce21794760a2399740..3dd4803ce17adb053f86f29e3724d58d479136c6 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -24,8 +24,9 @@ use settings::{Settings, SettingsContent}; use strum::IntoEnumIterator as _; use theme::ThemeSettings; use ui::{ - Banner, Chip, ContextMenu, Divider, PopoverMenu, ScrollableHandle, Switch, ToggleButton, - Tooltip, WithScrollbar, prelude::*, + Banner, Chip, ContextMenu, Divider, PopoverMenu, ScrollableHandle, Switch, ToggleButtonGroup, + ToggleButtonGroupSize, ToggleButtonGroupStyle, ToggleButtonSimple, Tooltip, WithScrollbar, + prelude::*, }; use vim_mode_setting::VimModeSetting; use workspace::{ @@ -228,8 +229,10 @@ enum Feature { AgentClaude, AgentCodex, AgentGemini, + ExtensionBasedpyright, ExtensionRuff, ExtensionTailwind, + ExtensionTy, Git, LanguageBash, LanguageC, @@ -250,8 +253,13 @@ fn keywords_by_feature() -> &'static BTreeMap> { (Feature::AgentClaude, vec!["claude", "claude code"]), (Feature::AgentCodex, vec!["codex", "codex cli"]), (Feature::AgentGemini, vec!["gemini", "gemini cli"]), + ( + Feature::ExtensionBasedpyright, + vec!["basedpyright", "pyright"], + ), (Feature::ExtensionRuff, vec!["ruff"]), (Feature::ExtensionTailwind, vec!["tail", "tailwind"]), + (Feature::ExtensionTy, vec!["ty"]), (Feature::Git, vec!["git"]), (Feature::LanguageBash, vec!["sh", "bash"]), (Feature::LanguageC, vec!["c", "clang"]), @@ -292,6 +300,7 @@ pub struct ExtensionsPage { workspace: WeakEntity, list: UniformListScrollHandle, is_fetching_extensions: bool, + fetch_failed: bool, filter: ExtensionFilter, remote_extension_entries: Vec, dev_extension_entries: Vec>, @@ -352,6 +361,7 @@ impl ExtensionsPage { workspace: workspace.weak_handle(), list: scroll_handle, is_fetching_extensions: false, + fetch_failed: false, filter: ExtensionFilter::All, dev_extension_entries: Vec::new(), filtered_remote_extension_indices: Vec::new(), @@ -478,6 +488,7 @@ impl ExtensionsPage { cx: &mut Context, ) { self.is_fetching_extensions = true; + self.fetch_failed = false; cx.notify(); let extension_store = ExtensionStore::global(cx); @@ -533,17 +544,31 @@ impl 
ExtensionsPage { }; let fetch_result = remote_extensions.await; - this.update(cx, |this, cx| { + + let result = this.update(cx, |this, cx| { cx.notify(); this.dev_extension_entries = dev_extensions; this.is_fetching_extensions = false; - this.remote_extension_entries = fetch_result?; - this.filter_extension_entries(cx); - if let Some(callback) = on_complete { - callback(this, cx); + + match fetch_result { + Ok(extensions) => { + this.fetch_failed = false; + this.remote_extension_entries = extensions; + this.filter_extension_entries(cx); + if let Some(callback) = on_complete { + callback(this, cx); + } + Ok(()) + } + Err(err) => { + this.fetch_failed = true; + this.filter_extension_entries(cx); + Err(err) + } } - anyhow::Ok(()) - })? + }); + + result? }) .detach_and_log_err(cx); } @@ -714,7 +739,7 @@ impl ExtensionsPage { extension: &ExtensionMetadata, cx: &mut Context, ) -> ExtensionCard { - let this = cx.entity(); + let this = cx.weak_entity(); let status = Self::extension_status(&extension.id, cx); let has_dev_extension = Self::dev_extension_exists(&extension.id, cx); @@ -805,37 +830,47 @@ impl ExtensionsPage { ) .child( h_flex() - .gap_1() .justify_between() .child( - Icon::new(IconName::Person) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(extension.manifest.authors.join(", ")) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), + h_flex() + .gap_1() + .child( + Icon::new(IconName::Person) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(extension.manifest.authors.join(", ")) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), + ), ) .child( h_flex() - .ml_auto() .gap_1() - .child( + .child({ + let repo_url_for_tooltip = repository_url.clone(); + IconButton::new( SharedString::from(format!("repository-{}", extension.id)), IconName::Github, ) .icon_size(IconSize::Small) - .on_click(cx.listener({ - let repository_url = repository_url.clone(); + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Visit Extension Repository", + None, + repo_url_for_tooltip.clone(), + cx, + ) + }) + .on_click(cx.listener( move |_, _, _, cx| { cx.open_url(&repository_url); - } - })) - .tooltip(Tooltip::text(repository_url)), - ) + }, + )) + }) .child( PopoverMenu::new(SharedString::from(format!( "more-{}", @@ -854,13 +889,15 @@ impl ExtensionsPage { y: px(2.0), }) .menu(move |window, cx| { - Some(Self::render_remote_extension_context_menu( - &this, - extension_id.clone(), - authors.clone(), - window, - cx, - )) + this.upgrade().map(|this| { + Self::render_remote_extension_context_menu( + &this, + extension_id.clone(), + authors.clone(), + window, + cx, + ) + }) }), ), ), @@ -1136,15 +1173,14 @@ impl ExtensionsPage { h_flex() .key_context(key_context) .h_8() - .flex_1() .min_w(rems_from_px(384.)) + .flex_1() .pl_1p5() .pr_2() - .py_1() .gap_2() .border_1() .border_color(editor_border) - .rounded_lg() + .rounded_md() .child(Icon::new(IconName::MagnifyingGlass).color(Color::Muted)) .child(self.render_text_input(&self.query_editor, cx)) } @@ -1267,7 +1303,9 @@ impl ExtensionsPage { let has_search = self.search_query(cx).is_some(); let message = if self.is_fetching_extensions { - "Loading extensions..." + "Loading extensions…" + } else if self.fetch_failed { + "Failed to load extensions. Please check your connection and try again." 
} else { match self.filter { ExtensionFilter::All => { @@ -1294,7 +1332,17 @@ impl ExtensionsPage { } }; - Label::new(message) + h_flex() + .py_4() + .gap_1p5() + .when(self.fetch_failed, |this| { + this.child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + }) + .child(Label::new(message)) } fn update_settings( @@ -1325,6 +1373,23 @@ impl ExtensionsPage { return; }; + if let Some(id) = search.strip_prefix("id:") { + self.upsells.clear(); + + let upsell = match id.to_lowercase().as_str() { + "ruff" => Some(Feature::ExtensionRuff), + "basedpyright" => Some(Feature::ExtensionBasedpyright), + "ty" => Some(Feature::ExtensionTy), + _ => None, + }; + + if let Some(upsell) = upsell { + self.upsells.insert(upsell); + } + + return; + } + let search = search.to_lowercase(); let search_terms = search .split_whitespace() @@ -1407,8 +1472,7 @@ impl ExtensionsPage { }, ); }, - )) - .color(ui::SwitchColor::Accent), + )), ), ), ) @@ -1443,6 +1507,12 @@ impl ExtensionsPage { false, cx, ), + Feature::ExtensionBasedpyright => self.render_feature_upsell_banner( + "Basedpyright (Python language server) support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/python#basedpyright".into(), + false, + cx, + ), Feature::ExtensionRuff => self.render_feature_upsell_banner( "Ruff (linter for Python) support is built-in to Zed!".into(), "https://zed.dev/docs/languages/python#code-formatting--linting".into(), @@ -1455,6 +1525,12 @@ impl ExtensionsPage { false, cx, ), + Feature::ExtensionTy => self.render_feature_upsell_banner( + "Ty (Python language server) support is built-in to Zed!".into(), + "https://zed.dev/docs/languages/python".into(), + false, + cx, + ), Feature::Git => self.render_feature_upsell_banner( "Zed comes with basic Git support—more features are coming in the future." 
.into(), @@ -1544,13 +1620,13 @@ impl Render for ExtensionsPage { .child( h_flex() .w_full() - .gap_2() + .gap_1p5() .justify_between() .child(Headline::new("Extensions").size(HeadlineSize::XLarge)) .child( Button::new("install-dev-extension", "Install Dev Extension") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) .on_click(|_event, window, cx| { window.dispatch_action(Box::new(InstallDevExtension), cx) }), @@ -1559,58 +1635,51 @@ impl Render for ExtensionsPage { .child( h_flex() .w_full() - .gap_4() .flex_wrap() + .gap_2() .child(self.render_search(cx)) .child( - h_flex() - .child( - ToggleButton::new("filter-all", "All") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .toggle_state(self.filter == ExtensionFilter::All) - .on_click(cx.listener(|this, _event, _, cx| { - this.filter = ExtensionFilter::All; - this.filter_extension_entries(cx); - this.scroll_to_top(cx); - })) - .tooltip(move |_, cx| { - Tooltip::simple("Show all extensions", cx) - }) - .first(), - ) - .child( - ToggleButton::new("filter-installed", "Installed") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .toggle_state(self.filter == ExtensionFilter::Installed) - .on_click(cx.listener(|this, _event, _, cx| { - this.filter = ExtensionFilter::Installed; - this.filter_extension_entries(cx); - this.scroll_to_top(cx); - })) - .tooltip(move |_, cx| { - Tooltip::simple("Show installed extensions", cx) - }) - .middle(), + div().child( + ToggleButtonGroup::single_row( + "filter-buttons", + [ + ToggleButtonSimple::new( + "All", + cx.listener(|this, _event, _, cx| { + this.filter = ExtensionFilter::All; + this.filter_extension_entries(cx); + this.scroll_to_top(cx); + }), + ), + ToggleButtonSimple::new( + "Installed", + cx.listener(|this, _event, _, cx| { + this.filter = ExtensionFilter::Installed; + this.filter_extension_entries(cx); + this.scroll_to_top(cx); + }), + ), + ToggleButtonSimple::new( + "Not Installed", + cx.listener(|this, _event, _, cx| { + this.filter = ExtensionFilter::NotInstalled; + this.filter_extension_entries(cx); + this.scroll_to_top(cx); + }), + ), + ], ) - .child( - ToggleButton::new("filter-not-installed", "Not Installed") - .style(ButtonStyle::Filled) - .size(ButtonSize::Large) - .toggle_state( - self.filter == ExtensionFilter::NotInstalled, - ) - .on_click(cx.listener(|this, _event, _, cx| { - this.filter = ExtensionFilter::NotInstalled; - this.filter_extension_entries(cx); - this.scroll_to_top(cx); - })) - .tooltip(move |_, cx| { - Tooltip::simple("Show not installed extensions", cx) - }) - .last(), - ), + .style(ToggleButtonGroupStyle::Outlined) + .size(ToggleButtonGroupSize::Custom(rems_from_px(30.))) // Perfectly matches the input + .label_size(LabelSize::Default) + .auto_width() + .selected_index(match self.filter { + ExtensionFilter::All => 0, + ExtensionFilter::Installed => 1, + ExtensionFilter::NotInstalled => 2, + }) + .into_any_element(), + ), ), ), ) @@ -1670,16 +1739,14 @@ impl Render for ExtensionsPage { } if count == 0 { - this.py_4() - .child(self.render_empty_state(cx)) - .into_any_element() + this.child(self.render_empty_state(cx)).into_any_element() } else { - let scroll_handle = self.list.clone(); + let scroll_handle = &self.list; this.child( uniform_list("entries", count, cx.processor(Self::render_extensions)) .flex_grow() .pb_4() - .track_scroll(scroll_handle.clone()), + .track_scroll(scroll_handle), ) .vertical_scrollbar_for(scroll_handle, window, cx) .into_any_element() diff --git 
a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 47b6f1230ac747c2633327d1be923d33388cf179..1768e43d1d0a88433d61c6390f912377c2ba55e3 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -1,11 +1,5 @@ use crate::FeatureFlag; -pub struct PredictEditsRateCompletionsFeatureFlag; - -impl FeatureFlag for PredictEditsRateCompletionsFeatureFlag { - const NAME: &'static str = "predict-edits-rate-completions"; -} - pub struct NotebookFeatureFlag; impl FeatureFlag for NotebookFeatureFlag { @@ -17,3 +11,15 @@ pub struct PanicFeatureFlag; impl FeatureFlag for PanicFeatureFlag { const NAME: &'static str = "panic"; } + +pub struct InlineAssistantUseToolFeatureFlag; + +impl FeatureFlag for InlineAssistantUseToolFeatureFlag { + const NAME: &'static str = "inline-assistant-use-tool"; +} + +pub struct AgentV2FeatureFlag; + +impl FeatureFlag for AgentV2FeatureFlag { + const NAME: &'static str = "agent-v2"; +} diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 6f64dc20d0b97f1b12fb627c72209df555e6f1a7..73b21bb828a598d5bbc53c0ecf4511988c30bc65 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1060,7 +1060,7 @@ impl FileFinderDelegate { ( filename.to_string(), Vec::new(), - prefix.display(path_style).to_string() + path_style.separator(), + prefix.display(path_style).to_string() + path_style.primary_separator(), Vec::new(), ) } else { @@ -1071,7 +1071,7 @@ impl FileFinderDelegate { .map_or(String::new(), |f| f.to_string_lossy().into_owned()), Vec::new(), entry_path.absolute.parent().map_or(String::new(), |path| { - path.to_string_lossy().into_owned() + path_style.separator() + path.to_string_lossy().into_owned() + path_style.primary_separator() }), Vec::new(), ) @@ -1713,7 +1713,7 @@ impl PickerDelegate for FileFinderDelegate { ui::IconPosition::End, Some(ToggleIncludeIgnored.boxed_clone()), move |window, cx| { - window.focus(&focus_handle); + window.focus(&focus_handle, cx); window.dispatch_action( ToggleIncludeIgnored.boxed_clone(), cx, diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 690265562e1c36e685574ec590819d8f513c128a..aeb9d794c2b4bc014bd332ed03dc8e5c3dda709b 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -1598,7 +1598,7 @@ async fn test_history_match_positions(cx: &mut gpui::TestAppContext) { assert_eq!(file_label.highlight_indices(), &[0, 1, 2]); assert_eq!( path_label.text(), - format!("test{}", PathStyle::local().separator()) + format!("test{}", PathStyle::local().primary_separator()) ); assert_eq!(path_label.highlight_indices(), &[] as &[usize]); }); @@ -3452,3 +3452,99 @@ async fn test_paths_with_starting_slash(cx: &mut TestAppContext) { assert_eq!(active_editor.read(cx).title(cx), "file1.txt"); }); } + +#[gpui::test] +async fn test_clear_navigation_history(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/src"), + json!({ + "test": { + "first.rs": "// First file", + "second.rs": "// Second file", + "third.rs": "// Third file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + workspace.update_in(cx, |_workspace, window, cx| window.focused(cx)); + + // Open some files to generate navigation 
history + open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await; + open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await; + let history_before_clear = + open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await; + + assert_eq!( + history_before_clear.len(), + 2, + "Should have history items before clearing" + ); + + // Verify that file finder shows history items + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("fir"); + picker.update(cx, |finder, _| { + let matches = collect_search_matches(finder); + assert!( + !matches.history.is_empty(), + "File finder should show history items before clearing" + ); + }); + workspace.update_in(cx, |_, window, cx| { + window.dispatch_action(menu::Cancel.boxed_clone(), cx); + }); + + // Verify navigation state before clear + workspace.update(cx, |workspace, cx| { + let pane = workspace.active_pane(); + pane.read(cx).can_navigate_backward() + }); + + // Clear navigation history + cx.dispatch_action(workspace::ClearNavigationHistory); + + // Verify that navigation is disabled immediately after clear + workspace.update(cx, |workspace, cx| { + let pane = workspace.active_pane(); + assert!( + !pane.read(cx).can_navigate_backward(), + "Should not be able to navigate backward after clearing history" + ); + assert!( + !pane.read(cx).can_navigate_forward(), + "Should not be able to navigate forward after clearing history" + ); + }); + + // Verify that file finder no longer shows history items + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("fir"); + picker.update(cx, |finder, _| { + let matches = collect_search_matches(finder); + assert!( + matches.history.is_empty(), + "File finder should not show history items after clearing" + ); + }); + workspace.update_in(cx, |_, window, cx| { + window.dispatch_action(menu::Cancel.boxed_clone(), cx); + }); + + // Verify history is empty by opening a new file + // (this should not show any previous history) + let history_after_clear = + open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await; + assert_eq!( + history_after_clear.len(), + 0, + "Should have no history items after clearing" + ); +} diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index f29c0e6cd20f423dd9073abced0182f272b588c9..f75d0ee99dc32bc1a1ab812328bba3d36fcb2953 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -44,8 +44,9 @@ impl OpenPathDelegate { tx: oneshot::Sender>>, lister: DirectoryLister, creating_path: bool, - path_style: PathStyle, + cx: &App, ) -> Self { + let path_style = lister.path_style(cx); Self { tx: Some(tx), lister, @@ -216,8 +217,7 @@ impl OpenPathPrompt { cx: &mut Context, ) { workspace.toggle_modal(window, cx, |window, cx| { - let delegate = - OpenPathDelegate::new(tx, lister.clone(), creating_path, PathStyle::local()); + let delegate = OpenPathDelegate::new(tx, lister.clone(), creating_path, cx); let picker = Picker::uniform_list(delegate, window, cx).width(rems(34.)); let query = lister.default_query(cx); picker.set_query(query, window, cx); @@ -399,7 +399,12 @@ impl PickerDelegate for OpenPathDelegate { } }) .unwrap_or(false); - if should_prepend_with_current_dir { + + let current_dir_in_new_entries = new_entries + .iter() + .any(|entry| &entry.path.string == current_dir); + + if should_prepend_with_current_dir && !current_dir_in_new_entries { new_entries.insert( 0, CandidateInfo { @@ -554,7 +559,7 @@ impl PickerDelegate for 
OpenPathDelegate { parent_path, candidate.path.string, if candidate.is_dir { - path_style.separator() + path_style.primary_separator() } else { "" } @@ -564,7 +569,7 @@ impl PickerDelegate for OpenPathDelegate { parent_path, candidate.path.string, if candidate.is_dir { - path_style.separator() + path_style.primary_separator() } else { "" } @@ -821,7 +826,13 @@ impl PickerDelegate for OpenPathDelegate { } fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - Arc::from(format!("[directory{}]filename.ext", self.path_style.separator()).as_str()) + Arc::from( + format!( + "[directory{}]filename.ext", + self.path_style.primary_separator() + ) + .as_str(), + ) } fn separators_after_indices(&self) -> Vec { diff --git a/crates/file_finder/src/open_path_prompt_tests.rs b/crates/file_finder/src/open_path_prompt_tests.rs index dea188034bfa7ae46f5b17c50424b40331fadb75..9af18c8a6bd82b389d4d18a997c3b5fe4a088730 100644 --- a/crates/file_finder/src/open_path_prompt_tests.rs +++ b/crates/file_finder/src/open_path_prompt_tests.rs @@ -5,7 +5,7 @@ use picker::{Picker, PickerDelegate}; use project::Project; use serde_json::json; use ui::rems; -use util::{path, paths::PathStyle}; +use util::path; use workspace::{AppState, Workspace}; use crate::OpenPathDelegate; @@ -37,7 +37,7 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); + let (picker, cx) = build_open_path_prompt(project, false, cx); insert_query(path!("sadjaoislkdjasldj"), &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), Vec::::new()); @@ -119,7 +119,7 @@ async fn test_open_path_prompt_completion(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); + let (picker, cx) = build_open_path_prompt(project, false, cx); // Confirm completion for the query "/root", since it's a directory, it should add a trailing slash. let query = path!("/root"); @@ -227,7 +227,7 @@ async fn test_open_path_prompt_on_windows(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); + let (picker, cx) = build_open_path_prompt(project, false, cx); // Support both forward and backward slashes. let query = "C:/root/"; @@ -295,56 +295,6 @@ async fn test_open_path_prompt_on_windows(cx: &mut TestAppContext) { ); } -#[gpui::test] -#[cfg_attr(not(target_os = "windows"), ignore)] -async fn test_open_path_prompt_on_windows_with_remote(cx: &mut TestAppContext) { - let app_state = init_test(cx); - app_state - .fs - .as_fake() - .insert_tree( - "/root", - json!({ - "a": "A", - "dir1": {}, - "dir2": {} - }), - ) - .await; - - let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await; - - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::Posix, cx); - - let query = "/root/"; - insert_query(query, &picker, cx).await; - assert_eq!( - collect_match_candidates(&picker, cx), - vec!["./", "a", "dir1", "dir2"] - ); - assert_eq!( - confirm_completion(query, 1, &picker, cx).unwrap(), - "/root/a" - ); - - // Confirm completion for the query "/root/d", selecting the second candidate "dir2", since it's a directory, it should add a trailing slash. 
- let query = "/root/d"; - insert_query(query, &picker, cx).await; - assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!( - confirm_completion(query, 1, &picker, cx).unwrap(), - "/root/dir2/" - ); - - let query = "/root/d"; - insert_query(query, &picker, cx).await; - assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!( - confirm_completion(query, 0, &picker, cx).unwrap(), - "/root/dir1/" - ); -} - #[gpui::test] async fn test_new_path_prompt(cx: &mut TestAppContext) { let app_state = init_test(cx); @@ -372,7 +322,7 @@ async fn test_new_path_prompt(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, true, PathStyle::local(), cx); + let (picker, cx) = build_open_path_prompt(project, true, cx); insert_query(path!("/root"), &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["root"]); @@ -406,16 +356,15 @@ fn init_test(cx: &mut TestAppContext) -> Arc { fn build_open_path_prompt( project: Entity, creating_path: bool, - path_style: PathStyle, cx: &mut TestAppContext, ) -> (Entity>, &mut VisualTestContext) { let (tx, _) = futures::channel::oneshot::channel(); let lister = project::DirectoryLister::Project(project.clone()); - let delegate = OpenPathDelegate::new(tx, lister.clone(), creating_path, path_style); let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); ( workspace.update_in(cx, |_, window, cx| { + let delegate = OpenPathDelegate::new(tx, lister.clone(), creating_path, cx); cx.new(|cx| { let picker = Picker::uniform_list(delegate, window, cx) .width(rems(34.)) diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 15093b3a5b5e18ce0ddca1e9c23350e1ac46d66e..52063eeddcc3aa74adae33f3a78c74ecb6b6f04c 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -33,6 +33,7 @@ tempfile.workspace = true text.workspace = true time.workspace = true util.workspace = true +is_executable = "1.0.5" [target.'cfg(target_os = "macos")'.dependencies] fsevent.workspace = true diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 97cd13d185817453c369356bdc60cbc1517bf1e1..be9b84ff6acd5e13080148f15103b8a21111de7a 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -3,7 +3,7 @@ use anyhow::{Context as _, Result, bail}; use collections::{HashMap, HashSet}; use futures::future::{self, BoxFuture, join_all}; use git::{ - Oid, + Oid, RunHook, blame::Blame, repository::{ AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository, @@ -23,6 +23,7 @@ use std::{ path::PathBuf, sync::{Arc, LazyLock}, }; +use text::LineEnding; use util::{paths::PathStyle, rel_path::RelPath}; pub static LOAD_INDEX_TEXT_TASK: LazyLock = LazyLock::new(TaskLabel::new); @@ -50,6 +51,8 @@ pub struct FakeGitRepositoryState { pub blames: HashMap, pub current_branch_name: Option, pub branches: HashSet, + /// List of remotes, keys are names and values are URLs + pub remotes: HashMap, pub simulated_index_write_error_message: Option, pub refs: HashMap, } @@ -68,6 +71,7 @@ impl FakeGitRepositoryState { refs: HashMap::from_iter([("HEAD".into(), "abc".into())]), merge_base_contents: Default::default(), oids: Default::default(), + remotes: HashMap::default(), } } } @@ -138,6 +142,7 @@ impl GitRepository for FakeGitRepository { path: RepoPath, content: Option, _env: Arc>, + _is_executable: bool, ) -> BoxFuture<'_, 
anyhow::Result<()>> { self.with_state_async(true, move |state| { if let Some(message) = &state.simulated_index_write_error_message { @@ -151,8 +156,8 @@ impl GitRepository for FakeGitRepository { }) } - fn remote_url(&self, _name: &str) -> Option { - None + fn remote_url(&self, _name: &str) -> BoxFuture<'_, Option> { + async move { None }.boxed() } fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result> { @@ -196,6 +201,7 @@ impl GitRepository for FakeGitRepository { async { Ok(CommitDetails { sha: commit.into(), + message: "initial commit".into(), ..Default::default() }) } @@ -377,11 +383,18 @@ impl GitRepository for FakeGitRepository { Ok(state .branches .iter() - .map(|branch_name| Branch { - is_head: Some(branch_name) == current_branch.as_ref(), - ref_name: branch_name.into(), - most_recent_commit: None, - upstream: None, + .map(|branch_name| { + let ref_name = if branch_name.starts_with("refs/") { + branch_name.into() + } else { + format!("refs/heads/{branch_name}").into() + }; + Branch { + is_head: Some(branch_name) == current_branch.as_ref(), + ref_name, + most_recent_commit: None, + upstream: None, + } }) .collect()) }) @@ -431,7 +444,21 @@ impl GitRepository for FakeGitRepository { }) } - fn blame(&self, path: RepoPath, _content: Rope) -> BoxFuture<'_, Result> { + fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + if !state.branches.remove(&name) { + bail!("no such branch: {name}"); + } + Ok(()) + }) + } + + fn blame( + &self, + path: RepoPath, + _content: Rope, + _line_ending: LineEnding, + ) -> BoxFuture<'_, Result> { self.with_state_async(false, move |state| { state .blames @@ -441,6 +468,25 @@ impl GitRepository for FakeGitRepository { }) } + fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result> { + self.file_history_paginated(path, 0, None) + } + + fn file_history_paginated( + &self, + path: RepoPath, + _skip: usize, + _limit: Option, + ) -> BoxFuture<'_, Result> { + async move { + Ok(git::repository::FileHistory { + entries: Vec::new(), + path, + }) + } + .boxed() + } + fn stage_paths( &self, paths: Vec, @@ -529,7 +575,15 @@ impl GitRepository for FakeGitRepository { _askpass: AskPassDelegate, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { - unimplemented!() + async { Ok(()) }.boxed() + } + + fn run_hook( + &self, + _hook: RunHook, + _env: Arc>, + ) -> BoxFuture<'_, Result<()>> { + async { Ok(()) }.boxed() } fn push( @@ -566,7 +620,24 @@ impl GitRepository for FakeGitRepository { unimplemented!() } - fn get_remotes(&self, _branch: Option) -> BoxFuture<'_, Result>> { + fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { + self.with_state_async(false, move |state| { + let remotes = state + .remotes + .keys() + .map(|r| Remote { + name: r.clone().into(), + }) + .collect::>(); + Ok(remotes) + }) + } + + fn get_push_remote(&self, _branch: String) -> BoxFuture<'_, Result>> { + unimplemented!() + } + + fn get_branch_remote(&self, _branch: String) -> BoxFuture<'_, Result>> { unimplemented!() } @@ -643,6 +714,20 @@ impl GitRepository for FakeGitRepository { fn default_branch(&self) -> BoxFuture<'_, Result>> { async { Ok(Some("main".into())) }.boxed() } + + fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.remotes.insert(name, url); + Ok(()) + }) + } + + fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.remotes.remove(&name); + Ok(()) + }) + } 
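// Illustrative sketch (not from the changeset): the `remotes` map added to
// `FakeGitRepositoryState` above is keyed by remote name with the URL as the value, and
// `create_remote`, `get_all_remotes`, and `remove_remote` simply read and write it.
// The bare `Remote` struct and the example URL below are assumptions made for this sketch,
// not the real types.
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
struct Remote {
    name: String,
}

fn main() {
    let mut remotes: HashMap<String, String> = HashMap::new();
    // create_remote: insert name -> URL.
    remotes.insert("origin".into(), "https://example.com/repo.git".into());
    // get_all_remotes: only the names are surfaced.
    let listed: Vec<Remote> = remotes.keys().map(|r| Remote { name: r.clone() }).collect();
    assert_eq!(listed, vec![Remote { name: "origin".into() }]);
    // remove_remote: drop the entry by name.
    remotes.remove("origin");
    assert!(remotes.is_empty());
}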
} #[cfg(test)] diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 33cc83a7886349a537a87d4b6c8bb3f5211608fc..2cbbf61a21e145464e9dbec01ace3b5510709d0d 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -32,6 +32,7 @@ use std::mem::MaybeUninit; use async_tar::Archive; use futures::{AsyncRead, Stream, StreamExt, future::BoxFuture}; use git::repository::{GitRepository, RealGitRepository}; +use is_executable::IsExecutable; use rope::Rope; use serde::{Deserialize, Serialize}; use smol::io::AsyncWriteExt; @@ -192,6 +193,8 @@ pub struct CopyOptions { pub struct RenameOptions { pub overwrite: bool, pub ignore_if_exists: bool, + /// Whether to create parent directories if they do not exist. + pub create_parents: bool, } #[derive(Copy, Clone, Default)] @@ -208,6 +211,7 @@ pub struct Metadata { pub is_dir: bool, pub len: u64, pub is_fifo: bool, + pub is_executable: bool, } /// Filesystem modification time. The purpose of this newtype is to discourage use of operations @@ -421,6 +425,86 @@ impl RealFs { job_event_subscribers: Arc::new(Mutex::new(Vec::new())), } } + + #[cfg(target_os = "windows")] + fn canonicalize(path: &Path) -> Result { + let mut strip_prefix = None; + + let mut new_path = PathBuf::new(); + for component in path.components() { + match component { + std::path::Component::Prefix(_) => { + let component = component.as_os_str(); + let canonicalized = if component + .to_str() + .map(|e| e.ends_with("\\")) + .unwrap_or(false) + { + std::fs::canonicalize(component) + } else { + let mut component = component.to_os_string(); + component.push("\\"); + std::fs::canonicalize(component) + }?; + + let mut strip = PathBuf::new(); + for component in canonicalized.components() { + match component { + Component::Prefix(prefix_component) => { + match prefix_component.kind() { + std::path::Prefix::Verbatim(os_str) => { + strip.push(os_str); + } + std::path::Prefix::VerbatimUNC(host, share) => { + strip.push("\\\\"); + strip.push(host); + strip.push(share); + } + std::path::Prefix::VerbatimDisk(disk) => { + strip.push(format!("{}:", disk as char)); + } + _ => strip.push(component), + }; + } + _ => strip.push(component), + } + } + strip_prefix = Some(strip); + new_path.push(component); + } + std::path::Component::RootDir => { + new_path.push(component); + } + std::path::Component::CurDir => { + if strip_prefix.is_none() { + // unrooted path + new_path.push(component); + } + } + std::path::Component::ParentDir => { + if strip_prefix.is_some() { + // rooted path + new_path.pop(); + } else { + new_path.push(component); + } + } + std::path::Component::Normal(_) => { + if let Ok(link) = std::fs::read_link(new_path.join(component)) { + let link = match &strip_prefix { + Some(e) => link.strip_prefix(e).unwrap_or(&link), + None => &link, + }; + new_path.extend(link); + } else { + new_path.push(component); + } + } + } + } + + Ok(new_path) + } } #[async_trait::async_trait] @@ -508,6 +592,12 @@ impl Fs for RealFs { } } + if options.create_parents { + if let Some(parent) = target.parent() { + self.create_dir(parent).await?; + } + } + smol::fs::rename(source, target).await?; Ok(()) } @@ -562,6 +652,8 @@ impl Fs for RealFs { use objc::{class, msg_send, sel, sel_impl}; unsafe { + /// Allow NSString::alloc use here because it sets autorelease + #[allow(clippy::disallowed_methods)] unsafe fn ns_string(string: &str) -> id { unsafe { NSString::alloc(nil).init_str(string).autorelease() } } @@ -724,7 +816,7 @@ impl Fs for RealFs { } let file = smol::fs::File::create(path).await?; let mut writer = 
smol::io::BufWriter::with_capacity(buffer_size, file); - for chunk in chunks(text, line_ending) { + for chunk in text::chunks_with_line_ending(text, line_ending) { writer.write_all(chunk.as_bytes()).await?; } writer.flush().await?; @@ -749,7 +841,13 @@ impl Fs for RealFs { let path = path.to_owned(); self.executor .spawn(async move { - std::fs::canonicalize(&path).with_context(|| format!("canonicalizing {path:?}")) + #[cfg(target_os = "windows")] + let result = Self::canonicalize(&path); + + #[cfg(not(target_os = "windows"))] + let result = std::fs::canonicalize(&path); + + result.with_context(|| format!("canonicalizing {path:?}")) }) .await } @@ -820,6 +918,12 @@ impl Fs for RealFs { #[cfg(unix)] let is_fifo = metadata.file_type().is_fifo(); + let path_buf = path.to_path_buf(); + let is_executable = self + .executor + .spawn(async move { path_buf.is_executable() }) + .await; + Ok(Some(Metadata { inode, mtime: MTime(metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH)), @@ -827,6 +931,7 @@ impl Fs for RealFs { is_symlink, is_dir: metadata.file_type().is_dir(), is_fifo, + is_executable, })) } @@ -2273,6 +2378,12 @@ impl Fs for FakeFs { let old_path = normalize_path(old_path); let new_path = normalize_path(new_path); + if options.create_parents { + if let Some(parent) = new_path.parent() { + self.create_dir(parent).await?; + } + } + let mut state = self.state.lock(); let moved_entry = state.write_path(&old_path, |e| { if let btree_map::Entry::Occupied(e) = e { @@ -2457,7 +2568,7 @@ impl Fs for FakeFs { async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> { self.simulate_random_delay().await; let path = normalize_path(path); - let content = chunks(text, line_ending).collect::(); + let content = text::chunks_with_line_ending(text, line_ending).collect::(); if let Some(path) = path.parent() { self.create_dir(path).await?; } @@ -2527,6 +2638,7 @@ impl Fs for FakeFs { is_dir: false, is_symlink, is_fifo: false, + is_executable: false, }, FakeFsEntry::Dir { inode, mtime, len, .. @@ -2537,6 +2649,7 @@ impl Fs for FakeFs { is_dir: true, is_symlink, is_fifo: false, + is_executable: false, }, FakeFsEntry::Symlink { .. 
} => unreachable!(), })) @@ -2673,25 +2786,6 @@ impl Fs for FakeFs { } } -fn chunks(rope: &Rope, line_ending: LineEnding) -> impl Iterator { - rope.chunks().flat_map(move |chunk| { - let mut newline = false; - let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str()); - chunk - .lines() - .flat_map(move |line| { - let ending = if newline { - Some(line_ending.as_str()) - } else { - None - }; - newline = true; - ending.into_iter().chain([line]) - }) - .chain(end_with_newline) - }) -} - pub fn normalize_path(path: &Path) -> PathBuf { let mut components = path.components().peekable(); let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { @@ -3310,4 +3404,83 @@ mod tests { let content = std::fs::read_to_string(&file_to_be_replaced).unwrap(); assert_eq!(content, "Hello"); } + + #[gpui::test] + #[cfg(target_os = "windows")] + async fn test_realfs_canonicalize(executor: BackgroundExecutor) { + use util::paths::SanitizedPath; + + let fs = RealFs { + bundled_git_binary_path: None, + executor, + next_job_id: Arc::new(AtomicUsize::new(0)), + job_event_subscribers: Arc::new(Mutex::new(Vec::new())), + }; + let temp_dir = TempDir::new().unwrap(); + let file = temp_dir.path().join("test (1).txt"); + let file = SanitizedPath::new(&file); + std::fs::write(&file, "test").unwrap(); + + let canonicalized = fs.canonicalize(file.as_path()).await; + assert!(canonicalized.is_ok()); + } + + #[gpui::test] + async fn test_rename(executor: BackgroundExecutor) { + let fs = FakeFs::new(executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { + "file_a.txt": "content a", + "file_b.txt": "content b" + } + }), + ) + .await; + + fs.rename( + Path::new(path!("/root/src/file_a.txt")), + Path::new(path!("/root/src/new/renamed_a.txt")), + RenameOptions { + create_parents: true, + ..Default::default() + }, + ) + .await + .unwrap(); + + // Assert that the `file_a.txt` file was being renamed and moved to a + // different directory that did not exist before. + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/src/file_b.txt")), + PathBuf::from(path!("/root/src/new/renamed_a.txt")), + ] + ); + + let result = fs + .rename( + Path::new(path!("/root/src/file_b.txt")), + Path::new(path!("/root/src/old/renamed_b.txt")), + RenameOptions { + create_parents: false, + ..Default::default() + }, + ) + .await; + + // Assert that the `file_b.txt` file was not renamed nor moved, as + // `create_parents` was set to `false`. + // different directory that did not exist before. 
+ assert!(result.is_err()); + assert_eq!( + fs.files(), + vec![ + PathBuf::from(path!("/root/src/file_b.txt")), + PathBuf::from(path!("/root/src/new/renamed_a.txt")), + ] + ); + } } diff --git a/crates/fs/src/fs_watcher.rs b/crates/fs/src/fs_watcher.rs index 32be1112d0b235281d33dd14534ebb87d8a3bc55..18d5dbeeb9e82948aaa503e7268d39c5d1852a2b 100644 --- a/crates/fs/src/fs_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -72,8 +72,8 @@ impl Watcher for FsWatcher { } #[cfg(target_os = "linux")] { - log::trace!("path to watch is already watched: {path:?}"); if self.registrations.lock().contains_key(path) { + log::trace!("path to watch is already watched: {path:?}"); return Ok(()); } } diff --git a/crates/fsevent/src/fsevent.rs b/crates/fsevent/src/fsevent.rs index e4060f3ae06a8d9412baf1cd75a9503c1b6d359b..8af57c19ee242d62e3fe10fa4d4f3ea5cc945ebd 100644 --- a/crates/fsevent/src/fsevent.rs +++ b/crates/fsevent/src/fsevent.rs @@ -372,7 +372,9 @@ unsafe extern "C" { pub fn FSEventsGetCurrentEventId() -> u64; } -#[cfg(test)] +// These tests are disabled by default because they seem to be unresolvably flaky. +// Feel free to bring them back to help test this code +#[cfg(false)] mod tests { use super::*; use std::{fs, sync::mpsc, thread, time::Duration}; @@ -395,19 +397,19 @@ mod tests { thread::spawn(move || stream.run(move |events| tx.send(events.to_vec()).is_ok())); fs::write(path.join("new-file"), "").unwrap(); - let events = rx.recv_timeout(Duration::from_secs(2)).unwrap(); + let events = rx.recv_timeout(timeout()).unwrap(); let event = events.last().unwrap(); assert_eq!(event.path, path.join("new-file")); assert!(event.flags.contains(StreamFlags::ITEM_CREATED)); fs::remove_file(path.join("existing-file-5")).unwrap(); - let mut events = rx.recv_timeout(Duration::from_secs(2)).unwrap(); + let mut events = rx.recv_timeout(timeout()).unwrap(); let mut event = events.last().unwrap(); // we see this duplicate about 1/100 test runs. if event.path == path.join("new-file") && event.flags.contains(StreamFlags::ITEM_CREATED) { - events = rx.recv_timeout(Duration::from_secs(2)).unwrap(); + events = rx.recv_timeout(timeout()).unwrap(); event = events.last().unwrap(); } assert_eq!(event.path, path.join("existing-file-5")); @@ -440,13 +442,13 @@ mod tests { }); fs::write(path.join("new-file"), "").unwrap(); - let events = rx.recv_timeout(Duration::from_secs(2)).unwrap(); + let events = rx.recv_timeout(timeout()).unwrap(); let event = events.last().unwrap(); assert_eq!(event.path, path.join("new-file")); assert!(event.flags.contains(StreamFlags::ITEM_CREATED)); fs::remove_file(path.join("existing-file-5")).unwrap(); - let events = rx.recv_timeout(Duration::from_secs(2)).unwrap(); + let events = rx.recv_timeout(timeout()).unwrap(); let event = events.last().unwrap(); assert_eq!(event.path, path.join("existing-file-5")); assert!(event.flags.contains(StreamFlags::ITEM_REMOVED)); @@ -477,11 +479,11 @@ mod tests { }); fs::write(path.join("new-file"), "").unwrap(); - assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "running"); + assert_eq!(rx.recv_timeout(timeout()).unwrap(), "running"); // Dropping the handle causes `EventStream::run` to return. 
drop(handle); - assert_eq!(rx.recv_timeout(Duration::from_secs(2)).unwrap(), "stopped"); + assert_eq!(rx.recv_timeout(timeout()).unwrap(), "stopped"); } #[test] @@ -500,11 +502,14 @@ mod tests { } fn flush_historical_events() { - let duration = if std::env::var("CI").is_ok() { - Duration::from_secs(2) + thread::sleep(timeout()); + } + + fn timeout() -> Duration { + if std::env::var("CI").is_ok() { + Duration::from_secs(4) } else { Duration::from_millis(500) - }; - thread::sleep(duration); + } } } diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index eb844e349821394785bb61a34600f04a6fa985eb..782c9caca832d81fb6e4bce8f49b4f310664b292 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -96,7 +96,8 @@ impl<'a> Matcher<'a> { continue; } - let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len()); + let matrix_len = + self.query.len() * (lowercase_prefix.len() + lowercase_candidate_chars.len()); self.score_matrix.clear(); self.score_matrix.resize(matrix_len, None); self.best_position_matrix.clear(); @@ -596,4 +597,15 @@ mod tests { }) .collect() } + + /// Test for https://github.com/zed-industries/zed/issues/44324 + #[test] + fn test_recursive_score_match_index_out_of_bounds() { + let paths = vec!["İ/İ/İ/İ"]; + let query = "İ/İ"; + + // This panicked with "index out of bounds: the len is 21 but the index is 22" + let result = match_single_path_query(query, false, &paths); + let _ = result; + } } diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index b35f0c1ce6cec73995838eb82bf782d00f0129af..cce0e082840c4cd05d6e2b21eac0073d3eb7700f 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -107,7 +107,7 @@ pub fn match_fixed_path_set( .display(path_style) .chars() .collect::>(); - path_prefix_chars.extend(path_style.separator().chars()); + path_prefix_chars.extend(path_style.primary_separator().chars()); let lowercase_pfx = path_prefix_chars .iter() .map(|c| c.to_ascii_lowercase()) diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index e58b9cb7e0427bf3af1c88f473debba0b6f94f59..d6011de98b8c69837d16bf2a2211fc7632726230 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -1,14 +1,13 @@ +use crate::Oid; use crate::commit::get_messages; use crate::repository::RepoPath; -use crate::{GitRemote, Oid}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use futures::AsyncWriteExt; -use gpui::SharedString; use serde::{Deserialize, Serialize}; use std::process::Stdio; use std::{ops::Range, path::Path}; -use text::Rope; +use text::{LineEnding, Rope}; use time::OffsetDateTime; use time::UtcOffset; use time::macros::format_description; @@ -19,15 +18,6 @@ pub use git2 as libgit; pub struct Blame { pub entries: Vec, pub messages: HashMap, - pub remote_url: Option, -} - -#[derive(Clone, Debug, Default)] -pub struct ParsedCommitMessage { - pub message: SharedString, - pub permalink: Option, - pub pull_request: Option, - pub remote: Option, } impl Blame { @@ -36,9 +26,10 @@ impl Blame { working_directory: &Path, path: &RepoPath, content: &Rope, - remote_url: Option, + line_ending: LineEnding, ) -> Result { - let output = run_git_blame(git_binary, working_directory, path, content).await?; + let output = + run_git_blame(git_binary, working_directory, path, content, line_ending).await?; let mut entries = parse_git_blame(&output)?; entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); @@ -53,11 +44,7 @@ impl Blame { .await .context("failed to get commit 
messages")?; - Ok(Self { - entries, - messages, - remote_url, - }) + Ok(Self { entries, messages }) } } @@ -69,12 +56,12 @@ async fn run_git_blame( working_directory: &Path, path: &RepoPath, contents: &Rope, + line_ending: LineEnding, ) -> Result { let mut child = util::command::new_smol_command(git_binary) .current_dir(working_directory) .arg("blame") .arg("--incremental") - .arg("-w") .arg("--contents") .arg("-") .arg(path.as_unix_str()) @@ -89,7 +76,7 @@ async fn run_git_blame( .as_mut() .context("failed to get pipe to stdin of git blame command")?; - for chunk in contents.chunks() { + for chunk in text::chunks_with_line_ending(contents, line_ending) { stdin.write_all(chunk.as_bytes()).await?; } stdin.flush().await?; diff --git a/crates/git/src/commit.rs b/crates/git/src/commit.rs index ece1d76b8ae9c9f40f27178da1ef13fe1a78e659..1b450a3dffb9e9956e5b43aa2797ae02f90e731c 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -1,7 +1,52 @@ -use crate::{Oid, status::StatusCode}; +use crate::{ + BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, parse_git_remote_url, + status::StatusCode, +}; use anyhow::{Context as _, Result}; use collections::HashMap; -use std::path::Path; +use gpui::SharedString; +use std::{path::Path, sync::Arc}; + +#[derive(Clone, Debug, Default)] +pub struct ParsedCommitMessage { + pub message: SharedString, + pub permalink: Option, + pub pull_request: Option, + pub remote: Option, +} + +impl ParsedCommitMessage { + pub fn parse( + sha: String, + message: String, + remote_url: Option<&str>, + provider_registry: Option>, + ) -> Self { + if let Some((hosting_provider, remote)) = provider_registry + .and_then(|reg| remote_url.and_then(|url| parse_git_remote_url(reg, url))) + { + let pull_request = hosting_provider.extract_pull_request(&remote, &message); + Self { + message: message.into(), + permalink: Some( + hosting_provider + .build_commit_permalink(&remote, BuildCommitPermalinkParams { sha: &sha }), + ), + pull_request, + remote: Some(GitRemote { + host: hosting_provider, + owner: remote.owner.into(), + repo: remote.repo.into(), + }), + } + } else { + Self { + message: message.into(), + ..Default::default() + } + } + } +} pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result> { if shas.is_empty() { diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 50a1e1234ba3caeff729d37b6fa3022336b54e96..805d8d181ab7a434b565d38bdb2f802a8a3cda1a 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -23,6 +23,7 @@ pub const FSMONITOR_DAEMON: &str = "fsmonitor--daemon"; pub const LFS_DIR: &str = "lfs"; pub const COMMIT_MESSAGE: &str = "COMMIT_EDITMSG"; pub const INDEX_LOCK: &str = "index.lock"; +pub const REPO_EXCLUDE: &str = "info/exclude"; actions!( git, @@ -43,6 +44,8 @@ actions!( /// Shows git blame information for the current file. #[action(deprecated_aliases = ["editor::ToggleGitBlame"])] Blame, + /// Shows the git history for the current file. + FileHistory, /// Stages the current file. StageFile, /// Unstages the current file. 
@@ -225,3 +228,28 @@ impl From for usize { u64::from_ne_bytes(u64_bytes) as usize } } + +#[repr(i32)] +#[derive(Copy, Clone, Debug)] +pub enum RunHook { + PreCommit, +} + +impl RunHook { + pub fn as_str(&self) -> &str { + match self { + Self::PreCommit => "pre-commit", + } + } + + pub fn to_proto(&self) -> i32 { + *self as i32 + } + + pub fn from_proto(value: i32) -> Option { + match value { + 0 => Some(Self::PreCommit), + _ => None, + } + } +} diff --git a/crates/git/src/remote.rs b/crates/git/src/remote.rs index e9814afc51a4a24fd154d74d0be2387c28c59fa3..8fb44839848278a3a698d7f2562741f682f38e24 100644 --- a/crates/git/src/remote.rs +++ b/crates/git/src/remote.rs @@ -1,3 +1,4 @@ +use std::str::FromStr; use std::sync::LazyLock; use derive_more::Deref; @@ -11,7 +12,7 @@ pub struct RemoteUrl(Url); static USERNAME_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"^[0-9a-zA-Z\-_]+@").expect("Failed to create USERNAME_REGEX")); -impl std::str::FromStr for RemoteUrl { +impl FromStr for RemoteUrl { type Err = url::ParseError; fn from_str(input: &str) -> Result { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 2a1cd9478d3079716eda8234c02c8122b9381b38..c3dd0995ff83d4bfdd494e4b5c192ff5999c21f8 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,19 +1,22 @@ use crate::commit::parse_git_diff_name_status; use crate::stash::GitStash; use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff}; -use crate::{Oid, SHORT_SHA_LENGTH}; +use crate::{Oid, RunHook, SHORT_SHA_LENGTH}; use anyhow::{Context as _, Result, anyhow, bail}; use collections::HashMap; use futures::future::BoxFuture; use futures::io::BufWriter; use futures::{AsyncWriteExt, FutureExt as _, select_biased}; -use git2::BranchType; +use git2::{BranchType, ErrorCode}; use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Task}; use parking_lot::Mutex; use rope::Rope; use schemars::JsonSchema; use serde::Deserialize; use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader}; +use text::LineEnding; + +use std::collections::HashSet; use std::ffi::{OsStr, OsString}; use std::process::{ExitStatus, Stdio}; use std::{ @@ -55,6 +58,12 @@ impl Branch { self.ref_name.starts_with("refs/remotes/") } + pub fn remote_name(&self) -> Option<&str> { + self.ref_name + .strip_prefix("refs/remotes/") + .and_then(|stripped| stripped.split("/").next()) + } + pub fn tracking_status(&self) -> Option { self.upstream .as_ref() @@ -207,6 +216,22 @@ pub struct CommitDetails { pub author_name: SharedString, } +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct FileHistoryEntry { + pub sha: SharedString, + pub subject: SharedString, + pub message: SharedString, + pub commit_timestamp: i64, + pub author_name: SharedString, + pub author_email: SharedString, +} + +#[derive(Debug, Clone)] +pub struct FileHistory { + pub entries: Vec, + pub path: RepoPath, +} + #[derive(Debug)] pub struct CommitDiff { pub files: Vec, @@ -400,10 +425,11 @@ pub trait GitRepository: Send + Sync { path: RepoPath, content: Option, env: Arc>, + is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>>; /// Returns the URL of the remote with the given name. - fn remote_url(&self, name: &str) -> Option; + fn remote_url(&self, name: &str) -> BoxFuture<'_, Option>; /// Resolve a list of refs to SHAs. 
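// Illustrative sketch (not from the changeset): the `RunHook` enum introduced in git.rs
// above appears to cross the wire as a plain i32, so `to_proto` and `from_proto` need to
// stay inverses as variants are added. A standalone copy of that contract:
#[repr(i32)]
#[derive(Copy, Clone, Debug, PartialEq)]
enum RunHook {
    PreCommit,
}

impl RunHook {
    fn as_str(&self) -> &str {
        match self {
            Self::PreCommit => "pre-commit",
        }
    }

    fn to_proto(&self) -> i32 {
        *self as i32
    }

    fn from_proto(value: i32) -> Option<Self> {
        match value {
            0 => Some(Self::PreCommit),
            _ => None,
        }
    }
}

fn main() {
    let hook = RunHook::PreCommit;
    assert_eq!(hook.as_str(), "pre-commit");
    // Round-trip must hold for every variant.
    assert_eq!(RunHook::from_proto(hook.to_proto()), Some(RunHook::PreCommit));
    assert_eq!(RunHook::from_proto(42), None);
}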
fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>>; @@ -434,6 +460,8 @@ pub trait GitRepository: Send + Sync { -> BoxFuture<'_, Result<()>>; fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>>; + fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>>; + fn worktrees(&self) -> BoxFuture<'_, Result>>; fn create_worktree( @@ -460,7 +488,19 @@ pub trait GitRepository: Send + Sync { fn show(&self, commit: String) -> BoxFuture<'_, Result>; fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result>; - fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result>; + fn blame( + &self, + path: RepoPath, + content: Rope, + line_ending: LineEnding, + ) -> BoxFuture<'_, Result>; + fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result>; + fn file_history_paginated( + &self, + path: RepoPath, + skip: usize, + limit: Option, + ) -> BoxFuture<'_, Result>; /// Returns the absolute path to the repository. For worktrees, this will be the path to the /// worktree's gitdir within the main repository (typically `.git/worktrees/`). @@ -485,6 +525,12 @@ pub trait GitRepository: Send + Sync { env: Arc>, ) -> BoxFuture<'_, Result<()>>; + fn run_hook( + &self, + hook: RunHook, + env: Arc>, + ) -> BoxFuture<'_, Result<()>>; + fn commit( &self, message: SharedString, @@ -552,7 +598,15 @@ pub trait GitRepository: Send + Sync { cx: AsyncApp, ) -> BoxFuture<'_, Result>; - fn get_remotes(&self, branch_name: Option) -> BoxFuture<'_, Result>>; + fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>>; + + fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>>; + + fn get_all_remotes(&self) -> BoxFuture<'_, Result>>; + + fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>>; + + fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>>; /// returns a list of remote branches that contain HEAD fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result>>; @@ -604,6 +658,7 @@ pub struct RealGitRepository { pub repository: Arc>, pub system_git_binary_path: Option, pub any_git_binary_path: PathBuf, + any_git_binary_help_output: Arc>>, executor: BackgroundExecutor, } @@ -622,6 +677,7 @@ impl RealGitRepository { system_git_binary_path, any_git_binary_path, executor, + any_git_binary_help_output: Arc::new(Mutex::new(None)), }) } @@ -632,6 +688,27 @@ impl RealGitRepository { .context("failed to read git work directory") .map(Path::to_path_buf) } + + async fn any_git_binary_help_output(&self) -> SharedString { + if let Some(output) = self.any_git_binary_help_output.lock().clone() { + return output; + } + let git_binary_path = self.any_git_binary_path.clone(); + let executor = self.executor.clone(); + let working_directory = self.working_directory(); + let output: SharedString = self + .executor + .spawn(async move { + GitBinary::new(git_binary_path, working_directory?, executor) + .run(["help", "-a"]) + .await + }) + .await + .unwrap_or_default() + .into(); + *self.any_git_binary_help_output.lock() = Some(output.clone()); + output + } } #[derive(Clone, Debug)] @@ -931,7 +1008,15 @@ impl GitRepository for RealGitRepository { index.read(false)?; const STAGE_NORMAL: i32 = 0; - let oid = match index.get_path(path.as_std_path(), STAGE_NORMAL) { + let path = path.as_std_path(); + // `RepoPath` contains a `RelPath` which normalizes `.` into an empty path + // `get_path` unwraps on empty paths though, so undo that normalization here + let path = if path.components().next().is_none() { + 
".".as_ref() + } else { + path + }; + let oid = match index.get_path(path, STAGE_NORMAL) { Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, _ => return Ok(None), }; @@ -981,12 +1066,15 @@ impl GitRepository for RealGitRepository { path: RepoPath, content: Option, env: Arc>, + is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>> { let working_directory = self.working_directory(); let git_binary_path = self.any_git_binary_path.clone(); self.executor .spawn(async move { let working_directory = working_directory?; + let mode = if is_executable { "100755" } else { "100644" }; + if let Some(content) = content { let mut child = new_smol_command(&git_binary_path) .current_dir(&working_directory) @@ -1007,7 +1095,7 @@ impl GitRepository for RealGitRepository { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory) .envs(env.iter()) - .args(["update-index", "--add", "--cacheinfo", "100644", sha]) + .args(["update-index", "--add", "--cacheinfo", mode, sha]) .arg(path.as_unix_str()) .output() .await?; @@ -1038,10 +1126,16 @@ impl GitRepository for RealGitRepository { .boxed() } - fn remote_url(&self, name: &str) -> Option { - let repo = self.repository.lock(); - let remote = repo.find_remote(name).ok()?; - remote.url().map(|url| url.to_string()) + fn remote_url(&self, name: &str) -> BoxFuture<'_, Option> { + let repo = self.repository.clone(); + let name = name.to_owned(); + self.executor + .spawn(async move { + let repo = repo.lock(); + let remote = repo.find_remote(&name).ok()?; + remote.url().map(|url| url.to_string()) + }) + .boxed() } fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { @@ -1332,9 +1426,19 @@ impl GitRepository for RealGitRepository { branch } else if let Ok(revision) = repo.find_branch(&name, BranchType::Remote) { let (_, branch_name) = name.split_once("/").context("Unexpected branch format")?; + let revision = revision.get(); let branch_commit = revision.peel_to_commit()?; - let mut branch = repo.branch(&branch_name, &branch_commit, false)?; + let mut branch = match repo.branch(&branch_name, &branch_commit, false) { + Ok(branch) => branch, + Err(err) if err.code() == ErrorCode::Exists => { + repo.find_branch(&branch_name, BranchType::Local)? 
+ } + Err(err) => { + return Err(err.into()); + } + }; + branch.set_upstream(Some(&name))?; branch } else { @@ -1350,7 +1454,6 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { let branch = branch.await?; - GitBinary::new(git_binary_path, working_directory?, executor) .run(&["checkout", &branch]) .await?; @@ -1400,25 +1503,131 @@ impl GitRepository for RealGitRepository { .boxed() } - fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result> { + fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { + let git_binary_path = self.any_git_binary_path.clone(); + let working_directory = self.working_directory(); + let executor = self.executor.clone(); + + self.executor + .spawn(async move { + GitBinary::new(git_binary_path, working_directory?, executor) + .run(&["branch", "-d", &name]) + .await?; + anyhow::Ok(()) + }) + .boxed() + } + + fn blame( + &self, + path: RepoPath, + content: Rope, + line_ending: LineEnding, + ) -> BoxFuture<'_, Result> { let working_directory = self.working_directory(); let git_binary_path = self.any_git_binary_path.clone(); + let executor = self.executor.clone(); - let remote_url = self - .remote_url("upstream") - .or_else(|| self.remote_url("origin")); + executor + .spawn(async move { + crate::blame::Blame::for_path( + &git_binary_path, + &working_directory?, + &path, + &content, + line_ending, + ) + .await + }) + .boxed() + } - async move { - crate::blame::Blame::for_path( - &git_binary_path, - &working_directory?, - &path, - &content, - remote_url, - ) - .await - } - .boxed() + fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result> { + self.file_history_paginated(path, 0, None) + } + + fn file_history_paginated( + &self, + path: RepoPath, + skip: usize, + limit: Option, + ) -> BoxFuture<'_, Result> { + let working_directory = self.working_directory(); + let git_binary_path = self.any_git_binary_path.clone(); + self.executor + .spawn(async move { + let working_directory = working_directory?; + // Use a unique delimiter with a hardcoded UUID to separate commits + // This essentially eliminates any chance of encountering the delimiter in actual commit data + let commit_delimiter = + concat!("<>",); + + let format_string = format!( + "--pretty=format:%H%x00%s%x00%B%x00%at%x00%an%x00%ae{}", + commit_delimiter + ); + + let mut args = vec!["--no-optional-locks", "log", "--follow", &format_string]; + + let skip_str; + let limit_str; + if skip > 0 { + skip_str = skip.to_string(); + args.push("--skip"); + args.push(&skip_str); + } + if let Some(n) = limit { + limit_str = n.to_string(); + args.push("-n"); + args.push(&limit_str); + } + + args.push("--"); + + let output = new_smol_command(&git_binary_path) + .current_dir(&working_directory) + .args(&args) + .arg(path.as_unix_str()) + .output() + .await?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("git log failed: {stderr}"); + } + + let stdout = std::str::from_utf8(&output.stdout)?; + let mut entries = Vec::new(); + + for commit_block in stdout.split(commit_delimiter) { + let commit_block = commit_block.trim(); + if commit_block.is_empty() { + continue; + } + + let fields: Vec<&str> = commit_block.split('\0').collect(); + if fields.len() >= 6 { + let sha = fields[0].trim().to_string().into(); + let subject = fields[1].trim().to_string().into(); + let message = fields[2].trim().to_string().into(); + let commit_timestamp = fields[3].trim().parse().unwrap_or(0); + let author_name = 
fields[4].trim().to_string().into(); + let author_email = fields[5].trim().to_string().into(); + + entries.push(FileHistoryEntry { + sha, + subject, + message, + commit_timestamp, + author_name, + author_email, + }); + } + } + + Ok(FileHistory { entries, path }) + }) + .boxed() } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { @@ -1636,6 +1845,8 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let git_binary_path = self.any_git_binary_path.clone(); let executor = self.executor.clone(); + // Note: Do not spawn this command on the background thread, it might pop open the credential helper + // which we want to block on. async move { let mut cmd = new_smol_command(git_binary_path); cmd.current_dir(&working_directory?) @@ -1643,6 +1854,7 @@ impl GitRepository for RealGitRepository { .args(["commit", "--quiet", "-m"]) .arg(&message.to_string()) .arg("--cleanup=strip") + .arg("--no-verify") .stdout(smol::process::Stdio::piped()) .stderr(smol::process::Stdio::piped()); @@ -1677,6 +1889,8 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + // Note: Do not spawn this command on the background thread, it might pop open the credential helper + // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't push")?; let working_directory = working_directory?; @@ -1712,6 +1926,8 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + // Note: Do not spawn this command on the background thread, it might pop open the credential helper + // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't pull")?; let mut command = new_smol_command(git_binary_path); @@ -1746,6 +1962,8 @@ impl GitRepository for RealGitRepository { let remote_name = format!("{}", fetch_options); let git_binary_path = self.system_git_binary_path.clone(); let executor = cx.background_executor().clone(); + // Note: Do not spawn this command on the background thread, it might pop open the credential helper + // which we want to block on. 
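// Illustrative sketch (not from the changeset): how one record emitted by the
// `%H%x00%s%x00%B%x00%at%x00%an%x00%ae` format string in `file_history_paginated` above
// splits on NUL into the six fields consumed by `FileHistoryEntry`. The sample commit
// data below is made up.
fn main() {
    let fields_in = [
        "abc123",
        "Fix bug",
        "Fix bug\n\nLonger body",
        "1762948725",
        "Zed",
        "zed@example.com",
    ];
    let commit_block = fields_in.join("\0");

    let fields: Vec<&str> = commit_block.split('\0').collect();
    assert_eq!(fields.len(), 6);

    let sha = fields[0].trim();
    let subject = fields[1].trim();
    let message = fields[2].trim();
    let commit_timestamp: i64 = fields[3].trim().parse().unwrap_or(0);
    let author_name = fields[4].trim();
    let author_email = fields[5].trim();

    assert_eq!(sha, "abc123");
    assert_eq!(subject, "Fix bug");
    assert!(message.contains("Longer body"));
    assert_eq!(commit_timestamp, 1762948725);
    assert_eq!(author_name, "Zed");
    assert_eq!(author_email, "zed@example.com");
}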
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?; let mut command = new_smol_command(git_binary_path); @@ -1761,48 +1979,111 @@ impl GitRepository for RealGitRepository { .boxed() } - fn get_remotes(&self, branch_name: Option) -> BoxFuture<'_, Result>> { + fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>> { let working_directory = self.working_directory(); let git_binary_path = self.any_git_binary_path.clone(); self.executor .spawn(async move { let working_directory = working_directory?; - if let Some(branch_name) = branch_name { - let output = new_smol_command(&git_binary_path) - .current_dir(&working_directory) - .args(["config", "--get"]) - .arg(format!("branch.{}.remote", branch_name)) - .output() - .await?; + let output = new_smol_command(&git_binary_path) + .current_dir(&working_directory) + .args(["rev-parse", "--abbrev-ref"]) + .arg(format!("{branch}@{{push}}")) + .output() + .await?; + if !output.status.success() { + return Ok(None); + } + let remote_name = String::from_utf8_lossy(&output.stdout) + .split('/') + .next() + .map(|name| Remote { + name: name.trim().to_string().into(), + }); - if output.status.success() { - let remote_name = String::from_utf8_lossy(&output.stdout); + Ok(remote_name) + }) + .boxed() + } - return Ok(vec![Remote { - name: remote_name.trim().to_string().into(), - }]); - } + fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>> { + let working_directory = self.working_directory(); + let git_binary_path = self.any_git_binary_path.clone(); + self.executor + .spawn(async move { + let working_directory = working_directory?; + let output = new_smol_command(&git_binary_path) + .current_dir(&working_directory) + .args(["config", "--get"]) + .arg(format!("branch.{branch}.remote")) + .output() + .await?; + if !output.status.success() { + return Ok(None); } + let remote_name = String::from_utf8_lossy(&output.stdout); + return Ok(Some(Remote { + name: remote_name.trim().to_string().into(), + })); + }) + .boxed() + } + + fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { + let working_directory = self.working_directory(); + let git_binary_path = self.any_git_binary_path.clone(); + self.executor + .spawn(async move { + let working_directory = working_directory?; let output = new_smol_command(&git_binary_path) .current_dir(&working_directory) - .args(["remote"]) + .args(["remote", "-v"]) .output() .await?; anyhow::ensure!( output.status.success(), - "Failed to get remotes:\n{}", + "Failed to get all remotes:\n{}", String::from_utf8_lossy(&output.stderr) ); - let remote_names = String::from_utf8_lossy(&output.stdout) - .split('\n') - .filter(|name| !name.is_empty()) - .map(|name| Remote { - name: name.trim().to_string().into(), + let remote_names: HashSet = String::from_utf8_lossy(&output.stdout) + .lines() + .filter(|line| !line.is_empty()) + .filter_map(|line| { + let mut split_line = line.split_whitespace(); + let remote_name = split_line.next()?; + + Some(Remote { + name: remote_name.trim().to_string().into(), + }) }) .collect(); - Ok(remote_names) + + Ok(remote_names.into_iter().collect()) + }) + .boxed() + } + + fn remove_remote(&self, name: String) -> BoxFuture<'_, Result<()>> { + let repo = self.repository.clone(); + self.executor + .spawn(async move { + let repo = repo.lock(); + repo.remote_delete(&name)?; + + Ok(()) + }) + .boxed() + } + + fn create_remote(&self, name: String, url: String) -> BoxFuture<'_, Result<()>> { + let repo = self.repository.clone(); + self.executor + 
.spawn(async move { + let repo = repo.lock(); + repo.remote(&name, url.as_ref())?; + Ok(()) }) .boxed() } @@ -2037,6 +2318,55 @@ impl GitRepository for RealGitRepository { }) .boxed() } + + fn run_hook( + &self, + hook: RunHook, + env: Arc>, + ) -> BoxFuture<'_, Result<()>> { + let working_directory = self.working_directory(); + let repository = self.repository.clone(); + let git_binary_path = self.any_git_binary_path.clone(); + let executor = self.executor.clone(); + let help_output = self.any_git_binary_help_output(); + + // Note: Do not spawn these commands on the background thread, as this causes some git hooks to hang. + async move { + let working_directory = working_directory?; + if !help_output + .await + .lines() + .any(|line| line.trim().starts_with("hook ")) + { + let hook_abs_path = repository.lock().path().join("hooks").join(hook.as_str()); + if hook_abs_path.is_file() { + let output = new_smol_command(&hook_abs_path) + .envs(env.iter()) + .current_dir(&working_directory) + .output() + .await?; + + if !output.status.success() { + return Err(GitBinaryCommandError { + stdout: String::from_utf8_lossy(&output.stdout).into_owned(), + stderr: String::from_utf8_lossy(&output.stderr).into_owned(), + status: output.status, + } + .into()); + } + } + + return Ok(()); + } + + let git = GitBinary::new(git_binary_path, working_directory, executor) + .envs(HashMap::clone(&env)); + git.run(&["hook", "run", "--ignore-missing", hook.as_str()]) + .await?; + Ok(()) + } + .boxed() + } } fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { @@ -2387,22 +2717,37 @@ fn parse_branch_input(input: &str) -> Result> { continue; } let mut fields = line.split('\x00'); - let is_current_branch = fields.next().context("no HEAD")? == "*"; - let head_sha: SharedString = fields.next().context("no objectname")?.to_string().into(); - let parent_sha: SharedString = fields.next().context("no parent")?.to_string().into(); - let ref_name = fields.next().context("no refname")?.to_string().into(); - let upstream_name = fields.next().context("no upstream")?.to_string(); - let upstream_tracking = parse_upstream_track(fields.next().context("no upstream:track")?)?; - let commiterdate = fields.next().context("no committerdate")?.parse::()?; - let author_name = fields.next().context("no authorname")?.to_string().into(); - let subject: SharedString = fields - .next() - .context("no contents:subject")? 
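// Illustrative sketch (not from the changeset): `get_all_remotes` above now parses
// `git remote -v`, which lists each remote twice (fetch and push), so the names are
// deduplicated via a `HashSet` before being returned. A standalone version of that
// parsing; the sample output is made up.
use std::collections::HashSet;

fn main() {
    let lines = [
        "origin\thttps://example.com/repo.git (fetch)",
        "origin\thttps://example.com/repo.git (push)",
        "upstream\thttps://example.com/upstream.git (fetch)",
        "upstream\thttps://example.com/upstream.git (push)",
    ];
    let stdout = lines.join("\n");

    let names: HashSet<&str> = stdout
        .lines()
        .filter(|line| !line.is_empty())
        .filter_map(|line| line.split_whitespace().next())
        .collect();

    assert_eq!(names.len(), 2);
    assert!(names.contains("origin") && names.contains("upstream"));
}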
- .to_string() - .into(); + let Some(head) = fields.next() else { + continue; + }; + let Some(head_sha) = fields.next().map(|f| f.to_string().into()) else { + continue; + }; + let Some(parent_sha) = fields.next().map(|f| f.to_string()) else { + continue; + }; + let Some(ref_name) = fields.next().map(|f| f.to_string().into()) else { + continue; + }; + let Some(upstream_name) = fields.next().map(|f| f.to_string()) else { + continue; + }; + let Some(upstream_tracking) = fields.next().and_then(|f| parse_upstream_track(f).ok()) + else { + continue; + }; + let Some(commiterdate) = fields.next().and_then(|f| f.parse::().ok()) else { + continue; + }; + let Some(author_name) = fields.next().map(|f| f.to_string().into()) else { + continue; + }; + let Some(subject) = fields.next().map(|f| f.to_string().into()) else { + continue; + }; branches.push(Branch { - is_head: is_current_branch, + is_head: head == "*", ref_name, most_recent_commit: Some(CommitSummary { sha: head_sha, @@ -2744,6 +3089,44 @@ mod tests { ) } + #[test] + fn test_branches_parsing_containing_refs_with_missing_fields() { + #[allow(clippy::octal_escapes)] + let input = " \090012116c03db04344ab10d50348553aa94f1ea0\0refs/heads/broken\n \0eb0cae33272689bd11030822939dd2701c52f81e\0895951d681e5561478c0acdd6905e8aacdfd2249\0refs/heads/dev\0\0\01762948725\0Zed\0Add feature\n*\0895951d681e5561478c0acdd6905e8aacdfd2249\0\0refs/heads/main\0\0\01762948695\0Zed\0Initial commit\n"; + + let branches = parse_branch_input(input).unwrap(); + assert_eq!(branches.len(), 2); + assert_eq!( + branches, + vec![ + Branch { + is_head: false, + ref_name: "refs/heads/dev".into(), + upstream: None, + most_recent_commit: Some(CommitSummary { + sha: "eb0cae33272689bd11030822939dd2701c52f81e".into(), + subject: "Add feature".into(), + commit_timestamp: 1762948725, + author_name: SharedString::new("Zed"), + has_parent: true, + }) + }, + Branch { + is_head: true, + ref_name: "refs/heads/main".into(), + upstream: None, + most_recent_commit: Some(CommitSummary { + sha: "895951d681e5561478c0acdd6905e8aacdfd2249".into(), + subject: "Initial commit".into(), + commit_timestamp: 1762948695, + author_name: SharedString::new("Zed"), + has_parent: false, + }) + } + ] + ) + } + impl RealGitRepository { /// Force a Git garbage collection on the repository. 
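// Illustrative sketch (not from the changeset): the reworked `parse_branch_input` above
// uses let-else to skip records with missing fields (as exercised by
// `test_branches_parsing_containing_refs_with_missing_fields`) instead of failing the
// whole parse. The same pattern on a simplified three-field, NUL-separated record:
fn parse_records(input: &str) -> Vec<(String, String, String)> {
    let mut parsed = Vec::new();
    for line in input.lines() {
        let mut fields = line.split('\0');
        let Some(head) = fields.next().map(|field| field.to_string()) else {
            continue;
        };
        let Some(sha) = fields.next().map(|field| field.to_string()) else {
            continue;
        };
        let Some(ref_name) = fields.next().map(|field| field.to_string()) else {
            continue;
        };
        parsed.push((head, sha, ref_name));
    }
    parsed
}

fn main() {
    // The middle record is truncated, so it is skipped rather than aborting the parse.
    let input = "*\0abc\0refs/heads/main\n \0def\n \0123\0refs/heads/dev";
    assert_eq!(parse_records(input).len(), 2);
}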
fn gc(&self) -> BoxFuture<'_, Result<()>> { diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index 851556151e285975cb1eb7d3d33244d7e11b5663..9480e0ec28c0ffa61c1126b2a627f22dc445d7d3 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -18,6 +18,7 @@ futures.workspace = true git.workspace = true gpui.workspace = true http_client.workspace = true +itertools.workspace = true regex.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/git_hosting_providers/src/git_hosting_providers.rs b/crates/git_hosting_providers/src/git_hosting_providers.rs index 6940ea382a1a21dbb3e97b55d74ee2489a1691ba..37cf5882059d7a274661f5a083a23e3f25e676ff 100644 --- a/crates/git_hosting_providers/src/git_hosting_providers.rs +++ b/crates/git_hosting_providers/src/git_hosting_providers.rs @@ -26,18 +26,18 @@ pub fn init(cx: &mut App) { provider_registry.register_hosting_provider(Arc::new(Gitee)); provider_registry.register_hosting_provider(Arc::new(Github::public_instance())); provider_registry.register_hosting_provider(Arc::new(Gitlab::public_instance())); - provider_registry.register_hosting_provider(Arc::new(Sourcehut)); + provider_registry.register_hosting_provider(Arc::new(SourceHut::public_instance())); } /// Registers additional Git hosting providers. /// /// These require information from the Git repository to construct, so their /// registration is deferred until we have a Git repository initialized. -pub fn register_additional_providers( +pub async fn register_additional_providers( provider_registry: Arc, repository: Arc, ) { - let Some(origin_url) = repository.remote_url("origin") else { + let Some(origin_url) = repository.remote_url("origin").await else { return; }; @@ -51,6 +51,8 @@ pub fn register_additional_providers( provider_registry.register_hosting_provider(Arc::new(gitea_self_hosted)); } else if let Ok(bitbucket_self_hosted) = Bitbucket::from_remote_url(&origin_url) { provider_registry.register_hosting_provider(Arc::new(bitbucket_self_hosted)); + } else if let Ok(sourcehut_self_hosted) = SourceHut::from_remote_url(&origin_url) { + provider_registry.register_hosting_provider(Arc::new(sourcehut_self_hosted)); } } diff --git a/crates/git_hosting_providers/src/providers/bitbucket.rs b/crates/git_hosting_providers/src/providers/bitbucket.rs index 0c30a13758a8339087ebb146f0029baee0d3ea7e..07c6898d4e0affc3bdde4d8290607897cf2cd5be 100644 --- a/crates/git_hosting_providers/src/providers/bitbucket.rs +++ b/crates/git_hosting_providers/src/providers/bitbucket.rs @@ -1,8 +1,14 @@ -use std::str::FromStr; use std::sync::LazyLock; - -use anyhow::{Result, bail}; +use std::{str::FromStr, sync::Arc}; + +use anyhow::{Context as _, Result, bail}; +use async_trait::async_trait; +use futures::AsyncReadExt; +use gpui::SharedString; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; +use itertools::Itertools as _; use regex::Regex; +use serde::Deserialize; use url::Url; use git::{ @@ -20,6 +26,42 @@ fn pull_request_regex() -> &'static Regex { &PULL_REQUEST_REGEX } +#[derive(Debug, Deserialize)] +struct CommitDetails { + author: Author, +} + +#[derive(Debug, Deserialize)] +struct Author { + user: Account, +} + +#[derive(Debug, Deserialize)] +struct Account { + links: AccountLinks, +} + +#[derive(Debug, Deserialize)] +struct AccountLinks { + avatar: Option, +} + +#[derive(Debug, Deserialize)] +struct Link { + href: String, +} + +#[derive(Debug, Deserialize)] +struct 
CommitDetailsSelfHosted { + author: AuthorSelfHosted, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct AuthorSelfHosted { + avatar_url: Option, +} + pub struct Bitbucket { name: String, base_url: Url, @@ -61,8 +103,60 @@ impl Bitbucket { .host_str() .is_some_and(|host| host != "bitbucket.org") } + + async fn fetch_bitbucket_commit_author( + &self, + repo_owner: &str, + repo: &str, + commit: &str, + client: &Arc, + ) -> Result> { + let Some(host) = self.base_url.host_str() else { + bail!("failed to get host from bitbucket base url"); + }; + let is_self_hosted = self.is_self_hosted(); + let url = if is_self_hosted { + format!( + "https://{host}/rest/api/latest/projects/{repo_owner}/repos/{repo}/commits/{commit}?avatarSize=128" + ) + } else { + format!("https://api.{host}/2.0/repositories/{repo_owner}/{repo}/commit/{commit}") + }; + + let request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) + .await + .with_context(|| format!("error fetching BitBucket commit details at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + if is_self_hosted { + serde_json::from_str::(body_str) + .map(|commit| commit.author.avatar_url) + } else { + serde_json::from_str::(body_str) + .map(|commit| commit.author.user.links.avatar.map(|link| link.href)) + } + .context("failed to deserialize BitBucket commit details") + } } +#[async_trait] impl GitHostingProvider for Bitbucket { fn name(&self) -> String { self.name.clone() @@ -73,7 +167,7 @@ impl GitHostingProvider for Bitbucket { } fn supports_avatars(&self) -> bool { - false + true } fn format_line_number(&self, line: u32) -> String { @@ -98,9 +192,16 @@ impl GitHostingProvider for Bitbucket { return None; } - let mut path_segments = url.path_segments()?; - let owner = path_segments.next()?; - let repo = path_segments.next()?.trim_end_matches(".git"); + let mut path_segments = url.path_segments()?.collect::>(); + let repo = path_segments.pop()?.trim_end_matches(".git"); + let owner = if path_segments.get(0).is_some_and(|v| *v == "scm") && path_segments.len() > 1 + { + // Skip the "scm" segment if it's not the only segment + // https://github.com/gitkraken/vscode-gitlens/blob/a6e3c6fbb255116507eaabaa9940c192ed7bb0e1/src/git/remotes/bitbucket-server.ts#L72-L74 + path_segments.into_iter().skip(1).join("/") + } else { + path_segments.into_iter().join("/") + }; Some(ParsedGitRemote { owner: owner.into(), @@ -176,6 +277,22 @@ impl GitHostingProvider for Bitbucket { Some(PullRequest { number, url }) } + + async fn commit_author_avatar_url( + &self, + repo_owner: &str, + repo: &str, + commit: SharedString, + http_client: Arc, + ) -> Result> { + let commit = commit.to_string(); + let avatar_url = self + .fetch_bitbucket_commit_author(repo_owner, repo, &commit, &http_client) + .await? 
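// Illustrative sketch (not from the changeset): the Bitbucket `parse_remote_url` change
// above takes the last path segment as the repo and skips a leading "scm" segment, which
// self-hosted Bitbucket URLs use (per the vscode-gitlens reference linked above), unless
// it is the only remaining segment. The helper name and sample URLs here are assumptions
// for the sketch.
fn owner_and_repo(path_segments: &[&str]) -> Option<(String, String)> {
    let mut segments = path_segments.to_vec();
    let repo = segments.pop()?.trim_end_matches(".git").to_string();
    let owner = if segments.first() == Some(&"scm") && segments.len() > 1 {
        segments[1..].join("/")
    } else {
        segments.join("/")
    };
    Some((owner, repo))
}

fn main() {
    // e.g. https://bitbucket.company.com/scm/zed-industries/zed.git
    assert_eq!(
        owner_and_repo(&["scm", "zed-industries", "zed.git"]),
        Some(("zed-industries".to_string(), "zed".to_string()))
    );
    // With only "scm" before the repo, "scm" itself is treated as the owner.
    assert_eq!(
        owner_and_repo(&["scm", "zed.git"]),
        Some(("scm".to_string(), "zed".to_string()))
    );
}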
+ .map(|avatar_url| Url::parse(&avatar_url)) + .transpose()?; + Ok(avatar_url) + } } #[cfg(test)] @@ -264,6 +381,38 @@ mod tests { repo: "zed".into(), } ); + + // Test with "scm" in the path + let remote_url = "https://bitbucket.company.com/scm/zed-industries/zed.git"; + + let parsed_remote = Bitbucket::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + + // Test with only "scm" as owner + let remote_url = "https://bitbucket.company.com/scm/zed.git"; + + let parsed_remote = Bitbucket::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "scm".into(), + repo: "zed".into(), + } + ); } #[test] diff --git a/crates/git_hosting_providers/src/providers/sourcehut.rs b/crates/git_hosting_providers/src/providers/sourcehut.rs index 55bff551846b5f69bad8ccaeaccf3ad55868303f..41011b023bef01a06138ead26d93ba447d6a4ba1 100644 --- a/crates/git_hosting_providers/src/providers/sourcehut.rs +++ b/crates/git_hosting_providers/src/providers/sourcehut.rs @@ -1,5 +1,6 @@ use std::str::FromStr; +use anyhow::{Result, bail}; use url::Url; use git::{ @@ -7,15 +8,52 @@ use git::{ RemoteUrl, }; -pub struct Sourcehut; +use crate::get_host_from_git_remote_url; -impl GitHostingProvider for Sourcehut { +pub struct SourceHut { + name: String, + base_url: Url, +} + +impl SourceHut { + pub fn new(name: &str, base_url: Url) -> Self { + Self { + name: name.to_string(), + base_url, + } + } + + pub fn public_instance() -> Self { + Self::new("SourceHut", Url::parse("https://git.sr.ht").unwrap()) + } + + pub fn from_remote_url(remote_url: &str) -> Result { + let host = get_host_from_git_remote_url(remote_url)?; + if host == "git.sr.ht" { + bail!("the SourceHut instance is not self-hosted"); + } + + // TODO: detecting self hosted instances by checking whether "sourcehut" is in the url or not + // is not very reliable. See https://github.com/zed-industries/zed/issues/26393 for more + // information. + if !host.contains("sourcehut") { + bail!("not a SourceHut URL"); + } + + Ok(Self::new( + "SourceHut Self-Hosted", + Url::parse(&format!("https://{}", host))?, + )) + } +} + +impl GitHostingProvider for SourceHut { fn name(&self) -> String { - "SourceHut".to_string() + self.name.clone() } fn base_url(&self) -> Url { - Url::parse("https://git.sr.ht").unwrap() + self.base_url.clone() } fn supports_avatars(&self) -> bool { @@ -34,7 +72,7 @@ impl GitHostingProvider for Sourcehut { let url = RemoteUrl::from_str(url).ok()?; let host = url.host_str()?; - if host != "git.sr.ht" { + if host != self.base_url.host_str()? 
{ return None; } @@ -96,7 +134,7 @@ mod tests { #[test] fn test_parse_remote_url_given_ssh_url() { - let parsed_remote = Sourcehut + let parsed_remote = SourceHut::public_instance() .parse_remote_url("git@git.sr.ht:~zed-industries/zed") .unwrap(); @@ -111,7 +149,7 @@ mod tests { #[test] fn test_parse_remote_url_given_ssh_url_with_git_suffix() { - let parsed_remote = Sourcehut + let parsed_remote = SourceHut::public_instance() .parse_remote_url("git@git.sr.ht:~zed-industries/zed.git") .unwrap(); @@ -126,7 +164,7 @@ mod tests { #[test] fn test_parse_remote_url_given_https_url() { - let parsed_remote = Sourcehut + let parsed_remote = SourceHut::public_instance() .parse_remote_url("https://git.sr.ht/~zed-industries/zed") .unwrap(); @@ -139,9 +177,63 @@ mod tests { ); } + #[test] + fn test_parse_remote_url_given_self_hosted_ssh_url() { + let remote_url = "git@sourcehut.org:~zed-industries/zed"; + + let parsed_remote = SourceHut::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_ssh_url_with_git_suffix() { + let remote_url = "git@sourcehut.org:~zed-industries/zed.git"; + + let parsed_remote = SourceHut::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_https_url() { + let remote_url = "https://sourcehut.org/~zed-industries/zed"; + + let parsed_remote = SourceHut::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + #[test] fn test_build_sourcehut_permalink() { - let permalink = Sourcehut.build_permalink( + let permalink = SourceHut::public_instance().build_permalink( ParsedGitRemote { owner: "zed-industries".into(), repo: "zed".into(), @@ -159,7 +251,7 @@ mod tests { #[test] fn test_build_sourcehut_permalink_with_git_suffix() { - let permalink = Sourcehut.build_permalink( + let permalink = SourceHut::public_instance().build_permalink( ParsedGitRemote { owner: "zed-industries".into(), repo: "zed.git".into(), @@ -175,9 +267,49 @@ mod tests { assert_eq!(permalink.to_string(), expected_url.to_string()) } + #[test] + fn test_build_sourcehut_self_hosted_permalink() { + let permalink = SourceHut::from_remote_url("https://sourcehut.org/~zed-industries/zed") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new( + "faa6f979be417239b2e070dbbf6392b909224e0b", + &repo_path("crates/editor/src/git/permalink.rs"), + None, + ), + ); + + let expected_url = "https://sourcehut.org/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + + #[test] + fn test_build_sourcehut_self_hosted_permalink_with_git_suffix() { + let permalink = SourceHut::from_remote_url("https://sourcehut.org/~zed-industries/zed.git") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed.git".into(), + }, + BuildPermalinkParams::new( + "faa6f979be417239b2e070dbbf6392b909224e0b", + 
&repo_path("crates/editor/src/git/permalink.rs"), + None, + ), + ); + + let expected_url = "https://sourcehut.org/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + #[test] fn test_build_sourcehut_permalink_with_single_line_selection() { - let permalink = Sourcehut.build_permalink( + let permalink = SourceHut::public_instance().build_permalink( ParsedGitRemote { owner: "zed-industries".into(), repo: "zed".into(), @@ -195,7 +327,7 @@ mod tests { #[test] fn test_build_sourcehut_permalink_with_multi_line_selection() { - let permalink = Sourcehut.build_permalink( + let permalink = SourceHut::public_instance().build_permalink( ParsedGitRemote { owner: "zed-industries".into(), repo: "zed".into(), @@ -210,4 +342,44 @@ mod tests { let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; assert_eq!(permalink.to_string(), expected_url.to_string()) } + + #[test] + fn test_build_sourcehut_self_hosted_permalink_with_single_line_selection() { + let permalink = SourceHut::from_remote_url("https://sourcehut.org/~zed-industries/zed") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new( + "faa6f979be417239b2e070dbbf6392b909224e0b", + &repo_path("crates/editor/src/git/permalink.rs"), + Some(6..6), + ), + ); + + let expected_url = "https://sourcehut.org/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + + #[test] + fn test_build_sourcehut_self_hosted_permalink_with_multi_line_selection() { + let permalink = SourceHut::from_remote_url("https://sourcehut.org/~zed-industries/zed") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new( + "faa6f979be417239b2e070dbbf6392b909224e0b", + &repo_path("crates/editor/src/git/permalink.rs"), + Some(23..47), + ), + ); + + let expected_url = "https://sourcehut.org/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } } diff --git a/crates/git_hosting_providers/src/settings.rs b/crates/git_hosting_providers/src/settings.rs index 9bf6c1022b04cc60b0fbaead5177f9fe8e6e0280..95243cbe4e4ba68249ba89b948a5ed2644909364 100644 --- a/crates/git_hosting_providers/src/settings.rs +++ b/crates/git_hosting_providers/src/settings.rs @@ -8,7 +8,7 @@ use settings::{ use url::Url; use util::ResultExt as _; -use crate::{Bitbucket, Github, Gitlab}; +use crate::{Bitbucket, Forgejo, Gitea, Github, Gitlab, SourceHut}; pub(crate) fn init(cx: &mut App) { init_git_hosting_provider_settings(cx); @@ -46,6 +46,11 @@ fn update_git_hosting_providers_from_settings(cx: &mut App) { } GitHostingProviderKind::Github => Arc::new(Github::new(&provider.name, url)) as _, GitHostingProviderKind::Gitlab => Arc::new(Gitlab::new(&provider.name, url)) as _, + GitHostingProviderKind::Gitea => Arc::new(Gitea::new(&provider.name, url)) as _, + GitHostingProviderKind::Forgejo => Arc::new(Forgejo::new(&provider.name, url)) as _, + GitHostingProviderKind::SourceHut => { + Arc::new(SourceHut::new(&provider.name, url)) as _ + } }) }); diff --git a/crates/git_ui/Cargo.toml 
b/crates/git_ui/Cargo.toml index 326424f28d46c9802439293165eae9708d63b064..c88244a036767be0ef862e74faa2113d54125443 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -13,7 +13,6 @@ name = "git_ui" path = "src/git_ui.rs" [features] -default = [] test-support = ["multi_buffer/test-support"] [dependencies] @@ -44,12 +43,14 @@ notifications.workspace = true panel.workspace = true picker.workspace = true project.workspace = true +prompt_store.workspace = true recent_projects.workspace = true remote.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +smol.workspace = true strum.workspace = true telemetry.workspace = true theme.workspace = true @@ -61,18 +62,24 @@ watch.workspace = true workspace.workspace = true zed_actions.workspace = true zeroize.workspace = true - +ztracing.workspace = true +tracing.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true [dev-dependencies] ctor.workspace = true editor = { workspace = true, features = ["test-support"] } +git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } +rand.workspace = true settings = { workspace = true, features = ["test-support"] } unindent.workspace = true workspace = { workspace = true, features = ["test-support"] } zlog.workspace = true + +[package.metadata.cargo-machete] +ignored = ["tracing"] diff --git a/crates/git_ui/src/blame_ui.rs b/crates/git_ui/src/blame_ui.rs index fc26f4608a38027e6abd4db122e713cc9ff4dc56..d4d8750a18ee6efbd90a38722043450c6ec61358 100644 --- a/crates/git_ui/src/blame_ui.rs +++ b/crates/git_ui/src/blame_ui.rs @@ -3,10 +3,7 @@ use crate::{ commit_view::CommitView, }; use editor::{BlameRenderer, Editor, hover_markdown_style}; -use git::{ - blame::{BlameEntry, ParsedCommitMessage}, - repository::CommitSummary, -}; +use git::{blame::BlameEntry, commit::ParsedCommitMessage, repository::CommitSummary}; use gpui::{ ClipboardItem, Entity, Hsla, MouseButton, ScrollHandle, Subscription, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, @@ -47,14 +44,17 @@ impl BlameRenderer for GitBlameRenderer { let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED); let avatar = if ProjectSettings::get_global(cx).git.blame.show_avatar { - CommitAvatar::new( - &blame_entry.sha.to_string().into(), - details.as_ref().and_then(|it| it.remote.as_ref()), + Some( + CommitAvatar::new( + &blame_entry.sha.to_string().into(), + details.as_ref().and_then(|it| it.remote.as_ref()), + ) + .render(window, cx), ) - .render(window, cx) } else { None }; + Some( div() .mr_2() @@ -64,7 +64,7 @@ impl BlameRenderer for GitBlameRenderer { .w_full() .gap_2() .justify_between() - .font_family(style.font().family) + .font(style.font()) .line_height(style.line_height) .text_color(cx.theme().status().hint) .child( @@ -80,7 +80,10 @@ impl BlameRenderer for GitBlameRenderer { .on_mouse_down(MouseButton::Right, { let blame_entry = blame_entry.clone(); let details = details.clone(); + let editor = editor.clone(); move |event, window, cx| { + cx.stop_propagation(); + deploy_blame_entry_context_menu( &blame_entry, details.as_ref(), @@ -101,22 +104,25 @@ impl BlameRenderer for GitBlameRenderer { repository.downgrade(), workspace.clone(), None, + None, window, cx, ) } }) - .hoverable_tooltip(move |_window, cx| { - cx.new(|cx| { - 
CommitTooltip::blame_entry( - &blame_entry, - details.clone(), - repository.clone(), - workspace.clone(), - cx, - ) + .when(!editor.read(cx).has_mouse_context_menu(), |el| { + el.hoverable_tooltip(move |_window, cx| { + cx.new(|cx| { + CommitTooltip::blame_entry( + &blame_entry, + details.clone(), + repository.clone(), + workspace.clone(), + cx, + ) + }) + .into() }) - .into() }), ) .into_any(), @@ -147,7 +153,7 @@ impl BlameRenderer for GitBlameRenderer { h_flex() .id("inline-blame") .w_full() - .font_family(style.font().family) + .font(style.font()) .text_color(cx.theme().status().hint) .line_height(style.line_height) .child(Icon::new(IconName::FileGit).color(Color::Hint)) @@ -197,9 +203,6 @@ impl BlameRenderer for GitBlameRenderer { let link_color = cx.theme().colors().text_accent; let markdown_style = { let mut style = hover_markdown_style(window, cx); - if let Some(code_block) = &style.code_block.text { - style.base_text_style.refine(code_block); - } style.link.refine(&TextStyleRefinement { color: Some(link_color), underline: Some(UnderlineStyle { @@ -260,7 +263,7 @@ impl BlameRenderer for GitBlameRenderer { .flex_wrap() .border_b_1() .border_color(cx.theme().colors().border_variant) - .children(avatar) + .child(avatar) .child(author) .when(!author_email.is_empty(), |this| { this.child( @@ -325,6 +328,7 @@ impl BlameRenderer for GitBlameRenderer { repository.downgrade(), workspace.clone(), None, + None, window, cx, ); @@ -365,6 +369,7 @@ impl BlameRenderer for GitBlameRenderer { repository.downgrade(), workspace, None, + None, window, cx, ) @@ -396,6 +401,7 @@ fn deploy_blame_entry_context_menu( }); editor.update(cx, move |editor, cx| { + editor.hide_blame_popover(false, cx); editor.deploy_mouse_context_menu(position, context_menu, window, cx); cx.notify(); }); diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 3ae9059b2a12f178931a5271b92c5fdf44f319d4..4db37e91b8720e51ff0416cc471842483ab1d0ca 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -1,12 +1,14 @@ use anyhow::Context as _; +use editor::Editor; use fuzzy::StringMatchCandidate; use collections::HashSet; use git::repository::Branch; +use gpui::http_client::Url; use gpui::{ - App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, - IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, SharedString, Styled, - Subscription, Task, Window, rems, + Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, + InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, + SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate, PickerEditorPosition}; use project::git_store::Repository; @@ -14,13 +16,30 @@ use project::project_settings::ProjectSettings; use settings::Settings; use std::sync::Arc; use time::OffsetDateTime; -use ui::{HighlightedLabel, ListItem, ListItemSpacing, Tooltip, prelude::*}; +use ui::{ + Divider, HighlightedLabel, KeyBinding, ListHeader, ListItem, ListItemSpacing, Tooltip, + prelude::*, +}; use util::ResultExt; use workspace::notifications::DetachAndPromptErr; use workspace::{ModalView, Workspace}; +use crate::{branch_picker, git_panel::show_error_toast}; + +actions!( + branch_picker, + [ + /// Deletes the selected git branch or remote. 
+ DeleteBranch, + /// Filter the list of remotes + FilterRemotes + ] +); + pub fn register(workspace: &mut Workspace) { - workspace.register_action(open); + workspace.register_action(|workspace, branch: &zed_actions::git::Branch, window, cx| { + open(workspace, branch, window, cx); + }); workspace.register_action(switch); workspace.register_action(checkout_branch); } @@ -49,21 +68,30 @@ pub fn open( window: &mut Window, cx: &mut Context, ) { + let workspace_handle = workspace.weak_handle(); let repository = workspace.project().read(cx).active_repository(cx); let style = BranchListStyle::Modal; workspace.toggle_modal(window, cx, |window, cx| { - BranchList::new(repository, style, rems(34.), window, cx) + BranchList::new(workspace_handle, repository, style, rems(34.), window, cx) }) } pub fn popover( + workspace: WeakEntity, repository: Option>, window: &mut Window, cx: &mut App, ) -> Entity { cx.new(|cx| { - let list = BranchList::new(repository, BranchListStyle::Popover, rems(20.), window, cx); - list.focus_handle(cx).focus(window); + let list = BranchList::new( + workspace, + repository, + BranchListStyle::Popover, + rems(20.), + window, + cx, + ); + list.focus_handle(cx).focus(window, cx); list }) } @@ -77,11 +105,13 @@ enum BranchListStyle { pub struct BranchList { width: Rems, pub picker: Entity>, + picker_focus_handle: FocusHandle, _subscription: Subscription, } impl BranchList { fn new( + workspace: WeakEntity, repository: Option>, style: BranchListStyle, width: Rems, @@ -148,8 +178,12 @@ impl BranchList { }) .detach_and_log_err(cx); - let delegate = BranchListDelegate::new(repository, style); + let delegate = BranchListDelegate::new(workspace, repository, style, cx); let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); + let picker_focus_handle = picker.focus_handle(cx); + picker.update(cx, |picker, _| { + picker.delegate.focus_handle = picker_focus_handle.clone(); + }); let _subscription = cx.subscribe(&picker, |_, _, _, cx| { cx.emit(DismissEvent); @@ -157,6 +191,7 @@ impl BranchList { Self { picker, + picker_focus_handle, width, _subscription, } @@ -171,13 +206,40 @@ impl BranchList { self.picker .update(cx, |picker, _| picker.delegate.modifiers = ev.modifiers) } + + fn handle_delete( + &mut self, + _: &branch_picker::DeleteBranch, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + picker + .delegate + .delete_at(picker.delegate.selected_index, window, cx) + }) + } + + fn handle_filter( + &mut self, + _: &branch_picker::FilterRemotes, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + picker.delegate.branch_filter = picker.delegate.branch_filter.invert(); + picker.update_matches(picker.query(cx), window, cx); + picker.refresh_placeholder(window, cx); + cx.notify(); + }); + } } impl ModalView for BranchList {} impl EventEmitter for BranchList {} impl Focusable for BranchList { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.picker_focus_handle.clone() } } @@ -187,6 +249,8 @@ impl Render for BranchList { .key_context("GitBranchSelector") .w(self.width) .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed)) + .on_action(cx.listener(Self::handle_delete)) + .on_action(cx.listener(Self::handle_filter)) .child(self.picker.clone()) .on_mouse_down_out({ cx.listener(move |this, _, window, cx| { @@ -198,15 +262,72 @@ impl Render for BranchList { } } -#[derive(Debug, Clone)] -struct 
BranchEntry { - branch: Branch, - positions: Vec<usize>, - is_new: bool, +#[derive(Debug, Clone, PartialEq)] +enum Entry { + Branch { + branch: Branch, + positions: Vec<usize>, + }, + NewUrl { + url: String, + }, + NewBranch { + name: String, + }, + NewRemoteName { + name: String, + url: SharedString, + }, +} + +impl Entry { + fn as_branch(&self) -> Option<&Branch> { + match self { + Entry::Branch { branch, .. } => Some(branch), + _ => None, + } + } + + fn name(&self) -> &str { + match self { + Entry::Branch { branch, .. } => branch.name(), + Entry::NewUrl { url, .. } => url.as_str(), + Entry::NewBranch { name, .. } => name.as_str(), + Entry::NewRemoteName { name, .. } => name.as_str(), + } + } + + #[cfg(test)] + fn is_new_url(&self) -> bool { + matches!(self, Self::NewUrl { .. }) + } + + #[cfg(test)] + fn is_new_branch(&self) -> bool { + matches!(self, Self::NewBranch { .. }) + } +} + +#[derive(Clone, Copy, PartialEq)] +enum BranchFilter { + /// Show both local and remote branches. + All, + /// Only show remote branches. + Remote, +} + +impl BranchFilter { + fn invert(&self) -> Self { + match self { + BranchFilter::All => BranchFilter::Remote, + BranchFilter::Remote => BranchFilter::All, + } + } } pub struct BranchListDelegate { - matches: Vec<BranchEntry>, + workspace: WeakEntity<Workspace>, + matches: Vec<Entry>, all_branches: Option<Vec<Branch>>, default_branch: Option<SharedString>, repo: Option<Entity<Repository>>, @@ -214,11 +335,32 @@ pub struct BranchListDelegate { selected_index: usize, last_query: String, modifiers: Modifiers, + branch_filter: BranchFilter, + state: PickerState, + focus_handle: FocusHandle, +} + +#[derive(Debug)] +enum PickerState { + /// When we display the list of branches/remotes. + List, + /// When we set a URL to create a new remote. + NewRemote, + /// When we confirm the new remote URL (after NewRemote). + CreateRemote(SharedString), + /// When we set a new branch to create. + NewBranch, } impl BranchListDelegate { - fn new(repo: Option<Entity<Repository>>, style: BranchListStyle) -> Self { + fn new( + workspace: WeakEntity<Workspace>, + repo: Option<Entity<Repository>>, + style: BranchListStyle, + cx: &mut Context<Self>, + ) -> Self { Self { + workspace, matches: vec![], repo, style, @@ -227,6 +369,9 @@ impl BranchListDelegate { selected_index: 0, last_query: Default::default(), modifiers: Default::default(), + branch_filter: BranchFilter::All, + state: PickerState::List, + focus_handle: cx.focus_handle(), } } @@ -255,13 +400,189 @@ impl BranchListDelegate { }); cx.emit(DismissEvent); } + + fn create_remote( + &self, + remote_name: String, + remote_url: String, + window: &mut Window, + cx: &mut Context<Picker<Self>>, + ) { + let Some(repo) = self.repo.clone() else { + return; + }; + + let receiver = repo.update(cx, |repo, _| repo.create_remote(remote_name, remote_url)); + + cx.background_spawn(async move { receiver.await? }) + .detach_and_prompt_err("Failed to create remote", window, cx, |e, _, _cx| { + Some(e.to_string()) + }); + cx.emit(DismissEvent); + } + + fn delete_at(&self, idx: usize, window: &mut Window, cx: &mut Context<Picker<Self>>) { + let Some(entry) = self.matches.get(idx).cloned() else { + return; + }; + let Some(repo) = self.repo.clone() else { + return; + }; + + let workspace = self.workspace.clone(); + + cx.spawn_in(window, async move |picker, cx| { + let mut is_remote = false; + let result = match &entry { + Entry::Branch { branch, .. } => match branch.remote_name() { + Some(remote_name) => { + is_remote = true; + repo.update(cx, |repo, _| repo.remove_remote(remote_name.to_string()))? + .await? + } + None => { + repo.update(cx, |repo, _| repo.delete_branch(branch.name().to_string()))? + .await?
+ } + }, + _ => { + log::error!("Failed to delete remote: wrong entry to delete"); + return Ok(()); + } + }; + + if let Err(e) = result { + if is_remote { + log::error!("Failed to delete remote: {}", e); + } else { + log::error!("Failed to delete branch: {}", e); + } + + if let Some(workspace) = workspace.upgrade() { + cx.update(|_window, cx| { + if is_remote { + show_error_toast( + workspace, + format!("remote remove {}", entry.name()), + e, + cx, + ) + } else { + show_error_toast( + workspace, + format!("branch -d {}", entry.name()), + e, + cx, + ) + } + })?; + } + + return Ok(()); + } + + picker.update_in(cx, |picker, _, cx| { + picker.delegate.matches.retain(|e| e != &entry); + + if let Entry::Branch { branch, .. } = &entry { + if let Some(all_branches) = &mut picker.delegate.all_branches { + all_branches.retain(|e| e.ref_name != branch.ref_name); + } + } + + if picker.delegate.matches.is_empty() { + picker.delegate.selected_index = 0; + } else if picker.delegate.selected_index >= picker.delegate.matches.len() { + picker.delegate.selected_index = picker.delegate.matches.len() - 1; + } + + cx.notify(); + })?; + + anyhow::Ok(()) + }) + .detach(); + } } impl PickerDelegate for BranchListDelegate { type ListItem = ListItem; fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Select branch…".into() + match self.state { + PickerState::List | PickerState::NewRemote | PickerState::NewBranch => { + match self.branch_filter { + BranchFilter::All => "Select branch or remote…", + BranchFilter::Remote => "Select remote…", + } + } + PickerState::CreateRemote(_) => "Enter a name for this remote…", + } + .into() + } + + fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { + match self.state { + PickerState::CreateRemote(_) => { + Some(SharedString::new_static("Remote name can't be empty")) + } + _ => None, + } + } + + fn render_editor( + &self, + editor: &Entity, + _window: &mut Window, + _cx: &mut Context>, + ) -> Div { + let focus_handle = self.focus_handle.clone(); + + v_flex() + .when( + self.editor_position() == PickerEditorPosition::End, + |this| this.child(Divider::horizontal()), + ) + .child( + h_flex() + .overflow_hidden() + .flex_none() + .h_9() + .px_2p5() + .child(editor.clone()) + .when( + self.editor_position() == PickerEditorPosition::End, + |this| { + let tooltip_label = match self.branch_filter { + BranchFilter::All => "Filter Remote Branches", + BranchFilter::Remote => "Show All Branches", + }; + + this.gap_1().justify_between().child({ + IconButton::new("filter-remotes", IconName::Filter) + .toggle_state(self.branch_filter == BranchFilter::Remote) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + tooltip_label, + &branch_picker::FilterRemotes, + &focus_handle, + cx, + ) + }) + .on_click(|_click, window, cx| { + window.dispatch_action( + branch_picker::FilterRemotes.boxed_clone(), + cx, + ); + }) + }) + }, + ), + ) + .when( + self.editor_position() == PickerEditorPosition::Start, + |this| this.child(Divider::horizontal()), + ) } fn editor_position(&self) -> PickerEditorPosition { @@ -298,26 +619,38 @@ impl PickerDelegate for BranchListDelegate { return Task::ready(()); }; - const RECENT_BRANCHES_COUNT: usize = 10; + let branch_filter = self.branch_filter; cx.spawn_in(window, async move |picker, cx| { - let mut matches: Vec = if query.is_empty() { - all_branches + let branch_matches_filter = |branch: &Branch| match branch_filter { + BranchFilter::All => true, + BranchFilter::Remote => branch.is_remote(), + }; + + let mut matches: Vec = 
if query.is_empty() { + let mut matches: Vec<Entry> = all_branches .into_iter() - .filter(|branch| !branch.is_remote()) - .take(RECENT_BRANCHES_COUNT) - .map(|branch| BranchEntry { + .filter(|branch| branch_matches_filter(branch)) + .map(|branch| Entry::Branch { branch, positions: Vec::new(), - is_new: false, }) - .collect() + .collect(); + + // Keep the existing recency sort within each group, but show local branches first. + matches.sort_by_key(|entry| entry.as_branch().is_some_and(|b| b.is_remote())); + + matches } else { - let candidates = all_branches + let branches = all_branches + .iter() + .filter(|branch| branch_matches_filter(branch)) + .collect::<Vec<_>>(); + let candidates = branches .iter() .enumerate() .map(|(ix, branch)| StringMatchCandidate::new(ix, branch.name())) .collect::<Vec<_>>(); - fuzzy::match_strings( + let mut matches: Vec<Entry> = fuzzy::match_strings( &candidates, &query, true, @@ -328,31 +661,59 @@ impl PickerDelegate for BranchListDelegate { ) .await .into_iter() - .map(|candidate| BranchEntry { - branch: all_branches[candidate.candidate_id].clone(), + .map(|candidate| Entry::Branch { + branch: branches[candidate.candidate_id].clone(), positions: candidate.positions, - is_new: false, }) - .collect() + .collect(); + + // Keep fuzzy-relevance ordering within local/remote groups, but show locals first. + matches.sort_by_key(|entry| entry.as_branch().is_some_and(|b| b.is_remote())); + + matches }; picker .update(cx, |picker, _| { + if let PickerState::CreateRemote(url) = &picker.delegate.state { + let query = query.replace(' ', "-"); + if !query.is_empty() { + picker.delegate.matches = vec![Entry::NewRemoteName { + name: query.clone(), + url: url.clone(), + }]; + picker.delegate.selected_index = 0; + } else { + picker.delegate.matches = Vec::new(); + picker.delegate.selected_index = 0; + } + picker.delegate.last_query = query; + return; + } + if !query.is_empty() - && !matches - .first() - .is_some_and(|entry| entry.branch.name() == query) + && !matches.first().is_some_and(|entry| entry.name() == query) { let query = query.replace(' ', "-"); - matches.push(BranchEntry { - branch: Branch { - ref_name: format!("refs/heads/{query}").into(), - is_head: false, - upstream: None, - most_recent_commit: None, - }, - positions: Vec::new(), - is_new: true, - }) + let is_url = query.trim_start_matches("git@").parse::<Url>().is_ok(); + let entry = if is_url { + Entry::NewUrl { url: query } + } else { + Entry::NewBranch { name: query } + }; + // Only transition to the NewBranch/NewRemote states when their entry is the only list item. + // Otherwise, stay in the List state so the footer buttons remain visible. + picker.delegate.state = if matches.is_empty() { + if is_url { + PickerState::NewRemote + } else { + PickerState::NewBranch + } + } else { + PickerState::List + }; + matches.push(entry); + } else { + picker.delegate.state = PickerState::List; } let delegate = &mut picker.delegate; delegate.matches = matches; @@ -372,52 +733,74 @@ impl PickerDelegate for BranchListDelegate { let Some(entry) = self.matches.get(self.selected_index()) else { return; }; - if entry.is_new { - let from_branch = if secondary { - self.default_branch.clone() - } else { - None - }; - self.create_branch( - from_branch, - entry.branch.name().to_owned().into(), - window, - cx, - ); - return; - } - let current_branch = self.repo.as_ref().map(|repo| { - repo.read_with(cx, |repo, _| { - repo.branch.as_ref().map(|branch| branch.ref_name.clone()) - }) - }); + match entry { + Entry::Branch { branch, ..
} => { + let current_branch = self.repo.as_ref().map(|repo| { + repo.read_with(cx, |repo, _| { + repo.branch.as_ref().map(|branch| branch.ref_name.clone()) + }) + }); - if current_branch - .flatten() - .is_some_and(|current_branch| current_branch == entry.branch.ref_name) - { - cx.emit(DismissEvent); - return; - } + if current_branch + .flatten() + .is_some_and(|current_branch| current_branch == branch.ref_name) + { + cx.emit(DismissEvent); + return; + } - let Some(repo) = self.repo.clone() else { - return; - }; + let Some(repo) = self.repo.clone() else { + return; + }; - let branch = entry.branch.clone(); - cx.spawn(async move |_, cx| { - repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))? - .await??; + let branch = branch.clone(); + cx.spawn(async move |_, cx| { + repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))? + .await??; - anyhow::Ok(()) - }) - .detach_and_prompt_err("Failed to change branch", window, cx, |_, _, _| None); + anyhow::Ok(()) + }) + .detach_and_prompt_err( + "Failed to change branch", + window, + cx, + |_, _, _| None, + ); + } + Entry::NewUrl { url } => { + self.state = PickerState::CreateRemote(url.clone().into()); + self.matches = Vec::new(); + self.selected_index = 0; + + cx.defer_in(window, |picker, window, cx| { + picker.refresh_placeholder(window, cx); + picker.set_query("", window, cx); + cx.notify(); + }); + + // returning early to prevent dismissing the modal, so a user can enter + // a remote name first. + return; + } + Entry::NewRemoteName { name, url } => { + self.create_remote(name.clone(), url.to_string(), window, cx); + } + Entry::NewBranch { name } => { + let from_branch = if secondary { + self.default_branch.clone() + } else { + None + }; + self.create_branch(from_branch, name.into(), window, cx); + } + } cx.emit(DismissEvent); } fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { + self.state = PickerState::List; cx.emit(DismissEvent); } @@ -431,141 +814,1134 @@ impl PickerDelegate for BranchListDelegate { let entry = &self.matches.get(ix)?; let (commit_time, author_name, subject) = entry - .branch - .most_recent_commit - .as_ref() - .map(|commit| { - let subject = commit.subject.clone(); - let commit_time = OffsetDateTime::from_unix_timestamp(commit.commit_timestamp) - .unwrap_or_else(|_| OffsetDateTime::now_utc()); - let local_offset = - time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC); - let formatted_time = time_format::format_localized_timestamp( - commit_time, - OffsetDateTime::now_utc(), - local_offset, - time_format::TimestampFormat::Relative, - ); - let author = commit.author_name.clone(); - (Some(formatted_time), Some(author), Some(subject)) + .as_branch() + .and_then(|branch| { + branch.most_recent_commit.as_ref().map(|commit| { + let subject = commit.subject.clone(); + let commit_time = OffsetDateTime::from_unix_timestamp(commit.commit_timestamp) + .unwrap_or_else(|_| OffsetDateTime::now_utc()); + let local_offset = + time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC); + let formatted_time = time_format::format_localized_timestamp( + commit_time, + OffsetDateTime::now_utc(), + local_offset, + time_format::TimestampFormat::Relative, + ); + let author = commit.author_name.clone(); + (Some(formatted_time), Some(author), Some(subject)) + }) }) .unwrap_or_else(|| (None, None, None)); - let icon = if let Some(default_branch) = self.default_branch.clone() - && entry.is_new - { - Some( - IconButton::new("branch-from-default", IconName::GitBranchAlt) - 
.on_click(cx.listener(move |this, _, window, cx| { - this.delegate.set_selected_index(ix, window, cx); - this.delegate.confirm(true, window, cx); - })) - .tooltip(move |_window, cx| { - Tooltip::for_action( - format!("Create branch based off default: {default_branch}"), - &menu::SecondaryConfirm, - cx, - ) - }), - ) - } else { - None + let entry_icon = match entry { + Entry::NewUrl { .. } | Entry::NewBranch { .. } | Entry::NewRemoteName { .. } => { + Icon::new(IconName::Plus).color(Color::Muted) + } + Entry::Branch { branch, .. } => { + if branch.is_remote() { + Icon::new(IconName::Screen).color(Color::Muted) + } else { + Icon::new(IconName::GitBranchAlt).color(Color::Muted) + } + } }; - let branch_name = if entry.is_new { - h_flex() - .gap_1() - .child( - Icon::new(IconName::Plus) - .size(IconSize::Small) - .color(Color::Muted), - ) - .child( - Label::new(format!("Create branch \"{}\"…", entry.branch.name())) - .single_line() - .truncate(), - ) - .into_any_element() - } else { - h_flex() - .max_w_48() - .child( - HighlightedLabel::new(entry.branch.name().to_owned(), entry.positions.clone()) - .truncate(), - ) - .into_any_element() + let entry_title = match entry { + Entry::NewUrl { .. } => Label::new("Create Remote Repository") + .single_line() + .truncate() + .into_any_element(), + Entry::NewBranch { name } => Label::new(format!("Create Branch: \"{name}\"…")) + .single_line() + .truncate() + .into_any_element(), + Entry::NewRemoteName { name, .. } => Label::new(format!("Create Remote: \"{name}\"")) + .single_line() + .truncate() + .into_any_element(), + Entry::Branch { branch, positions } => { + HighlightedLabel::new(branch.name().to_string(), positions.clone()) + .single_line() + .truncate() + .into_any_element() + } }; + let focus_handle = self.focus_handle.clone(); + let is_new_items = matches!( + entry, + Entry::NewUrl { .. } | Entry::NewBranch { .. } | Entry::NewRemoteName { .. 
} + ); + + let deleted_branch_icon = |entry_ix: usize, is_head_branch: bool| { + IconButton::new(("delete", entry_ix), IconName::Trash) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + "Delete Branch", + &branch_picker::DeleteBranch, + &focus_handle, + cx, + ) + }) + .disabled(is_head_branch) + .on_click(cx.listener(move |this, _, window, cx| { + this.delegate.delete_at(entry_ix, window, cx); + })) + }; + + let create_from_default_button = self.default_branch.as_ref().map(|default_branch| { + let tooltip_label: SharedString = format!("Create New From: {default_branch}").into(); + let focus_handle = self.focus_handle.clone(); + + IconButton::new("create_from_default", IconName::GitBranchPlus) + .tooltip(move |_, cx| { + Tooltip::for_action_in( + tooltip_label.clone(), + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + }) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(true, window, cx); + })) + .into_any_element() + }); + Some( - ListItem::new(SharedString::from(format!("vcs-menu-{ix}"))) + ListItem::new(format!("vcs-menu-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) - .tooltip({ - let branch_name = entry.branch.name().to_string(); - if entry.is_new { - Tooltip::text(format!("Create branch \"{}\"", branch_name)) - } else { - Tooltip::text(branch_name) - } - }) .child( - v_flex() + h_flex() .w_full() - .overflow_hidden() + .gap_3() + .flex_grow() + .child(entry_icon) .child( - h_flex() - .gap_6() - .justify_between() - .overflow_x_hidden() - .child(branch_name) - .when_some(commit_time, |label, commit_time| { - label.child( - Label::new(commit_time) - .size(LabelSize::Small) - .color(Color::Muted) - .into_element(), - ) - }), - ) - .when(self.style == BranchListStyle::Modal, |el| { - el.child(div().max_w_96().child({ - let message = if entry.is_new { - if let Some(current_branch) = - self.repo.as_ref().and_then(|repo| { - repo.read(cx).branch.as_ref().map(|b| b.name()) + v_flex() + .id("info_container") + .w_full() + .child(entry_title) + .child( + h_flex() + .w_full() + .justify_between() + .gap_1p5() + .when(self.style == BranchListStyle::Modal, |el| { + el.child(div().max_w_96().child({ + let message = match entry { + Entry::NewUrl { url } => { + format!("Based off {url}") + } + Entry::NewRemoteName { url, .. } => { + format!("Based off {url}") + } + Entry::NewBranch { .. } => { + if let Some(current_branch) = + self.repo.as_ref().and_then(|repo| { + repo.read(cx) + .branch + .as_ref() + .map(|b| b.name()) + }) + { + format!("Based off {}", current_branch) + } else { + "Based off the current branch" + .to_string() + } + } + Entry::Branch { .. 
} => { + let show_author_name = + ProjectSettings::get_global(cx) + .git + .branch_picker + .show_author_name; + + subject.map_or( + "No commits found".into(), + |subject| { + if show_author_name + && let Some(author) = + author_name + { + format!( + "{} • {}", + author, subject + ) + } else { + subject.to_string() + } + }, + ) + } + }; + + Label::new(message) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate() + })) }) - { - format!("based off {}", current_branch) - } else { - "based off the current branch".to_string() - } - } else { - let show_author_name = ProjectSettings::get_global(cx) - .git - .branch_picker - .show_author_name; - - subject.map_or("no commits found".into(), |subject| { - if show_author_name && author_name.is_some() { - format!("{} • {}", author_name.unwrap(), subject) - } else { - subject.to_string() - } + .when_some(commit_time, |label, commit_time| { + label.child( + Label::new(commit_time) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), + ) + .when_some( + entry.as_branch().map(|b| b.name().to_string()), + |this, branch_name| this.tooltip(Tooltip::text(branch_name)), + ), + ), + ) + .when( + self.editor_position() == PickerEditorPosition::End && !is_new_items, + |this| { + this.map(|this| { + let is_head_branch = + entry.as_branch().is_some_and(|branch| branch.is_head); + if self.selected_index() == ix { + this.end_slot(deleted_branch_icon(ix, is_head_branch)) + } else { + this.end_hover_slot(deleted_branch_icon(ix, is_head_branch)) + } + }) + }, + ) + .when_some( + if self.editor_position() == PickerEditorPosition::End && is_new_items { + create_from_default_button + } else { + None + }, + |this, create_from_default_button| { + this.map(|this| { + if self.selected_index() == ix { + this.end_slot(create_from_default_button) + } else { + this.end_hover_slot(create_from_default_button) + } + }) + }, + ), + ) + } + + fn render_header( + &self, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + matches!(self.state, PickerState::List).then(|| { + let label = match self.branch_filter { + BranchFilter::All => "Branches", + BranchFilter::Remote => "Remotes", + }; + + ListHeader::new(label).inset(true).into_any_element() + }) + } + + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { + if self.editor_position() == PickerEditorPosition::End { + return None; + } + let focus_handle = self.focus_handle.clone(); + + let footer_container = || { + h_flex() + .w_full() + .p_1p5() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + }; + + match self.state { + PickerState::List => { + let selected_entry = self.matches.get(self.selected_index); + + let branch_from_default_button = self + .default_branch + .as_ref() + .filter(|_| matches!(selected_entry, Some(Entry::NewBranch { .. 
}))) + .map(|default_branch| { + let button_label = format!("Create New From: {default_branch}"); + + Button::new("branch-from-default", button_label) + .key_binding( + KeyBinding::for_action_in( + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(true, window, cx); + })) + }); + + let delete_and_select_btns = h_flex() + .gap_1() + .child( + Button::new("delete-branch", "Delete") + .key_binding( + KeyBinding::for_action_in( + &branch_picker::DeleteBranch, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window + .dispatch_action(branch_picker::DeleteBranch.boxed_clone(), cx); + }), + ) + .child( + Button::new("select_branch", "Select") + .key_binding( + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(false, window, cx); + })), + ); + + Some( + footer_container() + .map(|this| { + if branch_from_default_button.is_some() { + this.justify_end().when_some( + branch_from_default_button, + |this, button| { + this.child(button).child( + Button::new("create", "Create") + .key_binding( + KeyBinding::for_action_in( + &menu::Confirm, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(false, window, cx); + })), + ) + }, + ) + } else { + this.justify_between() + .child({ + let focus_handle = focus_handle.clone(); + Button::new("filter-remotes", "Filter Remotes") + .toggle_state(matches!( + self.branch_filter, + BranchFilter::Remote + )) + .key_binding( + KeyBinding::for_action_in( + &branch_picker::FilterRemotes, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_click, window, cx| { + window.dispatch_action( + branch_picker::FilterRemotes.boxed_clone(), + cx, + ); + }) }) - }; - Label::new(message) - .size(LabelSize::Small) - .truncate() - .color(Color::Muted) + .child(delete_and_select_btns) + } + }) + .into_any_element(), + ) + } + PickerState::NewBranch => { + let branch_from_default_button = + self.default_branch.as_ref().map(|default_branch| { + let button_label = format!("Create New From: {default_branch}"); + + Button::new("branch-from-default", button_label) + .key_binding( + KeyBinding::for_action_in( + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(true, window, cx); })) - }), + }); + + Some( + footer_container() + .gap_1() + .justify_end() + .when_some(branch_from_default_button, |this, button| { + this.child(button) + }) + .child( + Button::new("branch-from-default", "Create") + .key_binding( + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(false, window, cx); + })), + ) + .into_any_element(), ) - .end_slot::(icon), + } + PickerState::CreateRemote(_) => Some( + footer_container() + .justify_end() + .child( + Button::new("branch-from-default", "Confirm") + .key_binding( + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.confirm(false, window, cx); + })) + .disabled(self.last_query.is_empty()), + ) + 
.into_any_element(), + ), + PickerState::NewRemote => None, + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use super::*; + use git::repository::{CommitSummary, Remote}; + use gpui::{AppContext, TestAppContext, VisualTestContext}; + use project::{FakeFs, Project}; + use rand::{Rng, rngs::StdRng}; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + }); + } + + fn create_test_branch( + name: &str, + is_head: bool, + remote_name: Option<&str>, + timestamp: Option, + ) -> Branch { + let ref_name = match remote_name { + Some(remote_name) => format!("refs/remotes/{remote_name}/{name}"), + None => format!("refs/heads/{name}"), + }; + + Branch { + is_head, + ref_name: ref_name.into(), + upstream: None, + most_recent_commit: timestamp.map(|ts| CommitSummary { + sha: "abc123".into(), + commit_timestamp: ts, + author_name: "Test Author".into(), + subject: "Test commit".into(), + has_parent: true, + }), + } + } + + fn create_test_branches() -> Vec { + vec![ + create_test_branch("main", true, None, Some(1000)), + create_test_branch("feature-auth", false, None, Some(900)), + create_test_branch("feature-ui", false, None, Some(800)), + create_test_branch("develop", false, None, Some(700)), + ] + } + + async fn init_branch_list_test( + repository: Option>, + branches: Vec, + cx: &mut TestAppContext, + ) -> (Entity, VisualTestContext) { + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let workspace = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); + + let branch_list = workspace + .update(cx, |workspace, window, cx| { + cx.new(|cx| { + let mut delegate = BranchListDelegate::new( + workspace.weak_handle(), + repository, + BranchListStyle::Modal, + cx, + ); + delegate.all_branches = Some(branches); + let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); + let picker_focus_handle = picker.focus_handle(cx); + picker.update(cx, |picker, _| { + picker.delegate.focus_handle = picker_focus_handle.clone(); + }); + + let _subscription = cx.subscribe(&picker, |_, _, _, cx| { + cx.emit(DismissEvent); + }); + + BranchList { + picker, + picker_focus_handle, + width: rems(34.), + _subscription, + } + }) + }) + .unwrap(); + + let cx = VisualTestContext::from_window(*workspace, cx); + + (branch_list, cx) + } + + async fn init_fake_repository(cx: &mut TestAppContext) -> Entity { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + ".git": {}, + "file.txt": "buffer_text".to_string() + }), ) + .await; + fs.set_head_for_repo( + path!("/dir/.git").as_ref(), + &[("file.txt", "test".to_string())], + "deadbeef", + ); + fs.set_index_for_repo( + path!("/dir/.git").as_ref(), + &[("file.txt", "index_text".to_string())], + ); + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let repository = cx.read(|cx| project.read(cx).active_repository(cx)); + + repository.unwrap() } - fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { - None + #[gpui::test] + async fn test_update_branch_matches_with_query(cx: &mut TestAppContext) { + init_test(cx); + + let branches = create_test_branches(); + let (branch_list, mut ctx) = init_branch_list_test(None, branches, cx).await; + let cx = &mut ctx; + + branch_list + .update_in(cx, |branch_list, 
window, cx| { + let query = "feature".to_string(); + branch_list.picker.update(cx, |picker, cx| { + picker.delegate.update_matches(query, window, cx) + }) + }) + .await; + cx.run_until_parked(); + + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + // Should have 2 existing branches + 1 "create new branch" entry = 3 total + assert_eq!(picker.delegate.matches.len(), 3); + assert!( + picker + .delegate + .matches + .iter() + .any(|m| m.name() == "feature-auth") + ); + assert!( + picker + .delegate + .matches + .iter() + .any(|m| m.name() == "feature-ui") + ); + // Verify the last entry is the "create new branch" option + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_branch()); + }) + }); + } + + async fn update_branch_list_matches_with_empty_query( + branch_list: &Entity, + cx: &mut VisualTestContext, + ) { + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker.delegate.update_matches(String::new(), window, cx) + }) + }) + .await; + cx.run_until_parked(); + } + + #[gpui::test] + async fn test_delete_branch(cx: &mut TestAppContext) { + init_test(cx); + let repository = init_fake_repository(cx).await; + + let branches = create_test_branches(); + + let branch_names = branches + .iter() + .map(|branch| branch.name().to_string()) + .collect::>(); + let repo = repository.clone(); + cx.spawn(async move |mut cx| { + for branch in branch_names { + repo.update(&mut cx, |repo, _| repo.create_branch(branch, None)) + .unwrap() + .await + .unwrap() + .unwrap(); + } + }) + .await; + cx.run_until_parked(); + + let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await; + let cx = &mut ctx; + + update_branch_list_matches_with_empty_query(&branch_list, cx).await; + + let branch_to_delete = branch_list.update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + assert_eq!(picker.delegate.matches.len(), 4); + let branch_to_delete = picker.delegate.matches.get(1).unwrap().name().to_string(); + picker.delegate.delete_at(1, window, cx); + branch_to_delete + }) + }); + cx.run_until_parked(); + + branch_list.update(cx, move |branch_list, cx| { + branch_list.picker.update(cx, move |picker, _cx| { + assert_eq!(picker.delegate.matches.len(), 3); + let branches = picker + .delegate + .matches + .iter() + .map(|be| be.name()) + .collect::>(); + assert_eq!( + branches, + ["main", "feature-auth", "feature-ui", "develop"] + .into_iter() + .filter(|name| name != &branch_to_delete) + .collect::>() + ); + }) + }); + } + + #[gpui::test] + async fn test_delete_remote(cx: &mut TestAppContext) { + init_test(cx); + let repository = init_fake_repository(cx).await; + let branches = vec![ + create_test_branch("main", true, Some("origin"), Some(1000)), + create_test_branch("feature-auth", false, Some("origin"), Some(900)), + create_test_branch("feature-ui", false, Some("fork"), Some(800)), + create_test_branch("develop", false, Some("private"), Some(700)), + ]; + + let remote_names = branches + .iter() + .filter_map(|branch| branch.remote_name().map(|r| r.to_string())) + .collect::>(); + let repo = repository.clone(); + cx.spawn(async move |mut cx| { + for branch in remote_names { + repo.update(&mut cx, |repo, _| { + repo.create_remote(branch, String::from("test")) + }) + .unwrap() + .await + .unwrap() + .unwrap(); + } + }) + .await; + cx.run_until_parked(); + + let (branch_list, mut ctx) = init_branch_list_test(repository.into(), 
branches, cx).await; + let cx = &mut ctx; + // Enable remote filter + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + picker.delegate.branch_filter = BranchFilter::Remote; + }); + }); + update_branch_list_matches_with_empty_query(&branch_list, cx).await; + + // Check matches, it should match all existing branches and no option to create new branch + let branch_to_delete = branch_list.update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + assert_eq!(picker.delegate.matches.len(), 4); + let branch_to_delete = picker.delegate.matches.get(1).unwrap().name().to_string(); + picker.delegate.delete_at(1, window, cx); + branch_to_delete + }) + }); + cx.run_until_parked(); + + // Check matches, it should match one less branch than before + branch_list.update(cx, move |branch_list, cx| { + branch_list.picker.update(cx, move |picker, _cx| { + assert_eq!(picker.delegate.matches.len(), 3); + let branches = picker + .delegate + .matches + .iter() + .map(|be| be.name()) + .collect::>(); + assert_eq!( + branches, + [ + "origin/main", + "origin/feature-auth", + "fork/feature-ui", + "private/develop" + ] + .into_iter() + .filter(|name| name != &branch_to_delete) + .collect::>() + ); + }) + }); + } + + #[gpui::test] + async fn test_branch_filter_shows_all_then_remotes_and_applies_query(cx: &mut TestAppContext) { + init_test(cx); + + let branches = vec![ + create_test_branch("main", true, Some("origin"), Some(1000)), + create_test_branch("feature-auth", false, Some("fork"), Some(900)), + create_test_branch("feature-ui", false, None, Some(800)), + create_test_branch("develop", false, None, Some(700)), + ]; + + let (branch_list, mut ctx) = init_branch_list_test(None, branches, cx).await; + let cx = &mut ctx; + + update_branch_list_matches_with_empty_query(&branch_list, cx).await; + + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.matches.len(), 4); + + let branches = picker + .delegate + .matches + .iter() + .map(|be| be.name()) + .collect::>(); + assert_eq!( + branches, + ["origin/main", "fork/feature-auth", "feature-ui", "develop"] + .into_iter() + .collect::>() + ); + + // Locals should be listed before remotes. 
+ let ordered = picker + .delegate + .matches + .iter() + .map(|be| be.name()) + .collect::>(); + assert_eq!( + ordered, + vec!["feature-ui", "develop", "origin/main", "fork/feature-auth"] + ); + + // Verify the last entry is NOT the "create new branch" option + let last_match = picker.delegate.matches.last().unwrap(); + assert!(!last_match.is_new_branch()); + assert!(!last_match.is_new_url()); + }) + }); + + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + picker.delegate.branch_filter = BranchFilter::Remote; + }) + }); + + update_branch_list_matches_with_empty_query(&branch_list, cx).await; + + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + assert_eq!(picker.delegate.matches.len(), 2); + let branches = picker + .delegate + .matches + .iter() + .map(|be| be.name()) + .collect::>(); + assert_eq!( + branches, + ["origin/main", "fork/feature-auth"] + .into_iter() + .collect::>() + ); + + // Verify the last entry is NOT the "create new branch" option + let last_match = picker.delegate.matches.last().unwrap(); + assert!(!last_match.is_new_url()); + picker.delegate.branch_filter = BranchFilter::Remote; + picker + .delegate + .update_matches(String::from("fork"), window, cx) + }) + }) + .await; + cx.run_until_parked(); + + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + // Should have 1 existing branch + 1 "create new branch" entry = 2 total + assert_eq!(picker.delegate.matches.len(), 2); + assert!( + picker + .delegate + .matches + .iter() + .any(|m| m.name() == "fork/feature-auth") + ); + // Verify the last entry is the "create new branch" option + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_branch()); + }) + }); + } + + #[gpui::test] + async fn test_new_branch_creation_with_query(test_cx: &mut TestAppContext) { + const MAIN_BRANCH: &str = "main"; + const FEATURE_BRANCH: &str = "feature"; + const NEW_BRANCH: &str = "new-feature-branch"; + + init_test(test_cx); + let repository = init_fake_repository(test_cx).await; + + let branches = vec![ + create_test_branch(MAIN_BRANCH, true, None, Some(1000)), + create_test_branch(FEATURE_BRANCH, false, None, Some(900)), + ]; + + let (branch_list, mut ctx) = + init_branch_list_test(repository.into(), branches, test_cx).await; + let cx = &mut ctx; + + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker + .delegate + .update_matches(NEW_BRANCH.to_string(), window, cx) + }) + }) + .await; + + cx.run_until_parked(); + + branch_list.update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_branch()); + assert_eq!(last_match.name(), NEW_BRANCH); + // State is NewBranch because no existing branches fuzzy-match the query + assert!(matches!(picker.delegate.state, PickerState::NewBranch)); + picker.delegate.confirm(false, window, cx); + }) + }); + cx.run_until_parked(); + + let branches = branch_list + .update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker + .delegate + .repo + .as_ref() + .unwrap() + .update(cx, |repo, _cx| repo.branches()) + }) + }) + .await + .unwrap() + .unwrap(); + + let new_branch = branches + .into_iter() + .find(|branch| branch.name() == NEW_BRANCH) + .expect("new-feature-branch should exist"); + assert_eq!( + new_branch.ref_name.as_ref(), + 
&format!("refs/heads/{NEW_BRANCH}"), + "branch ref_name should not have duplicate refs/heads/ prefix" + ); + } + + #[gpui::test] + async fn test_remote_url_detection_https(cx: &mut TestAppContext) { + init_test(cx); + let repository = init_fake_repository(cx).await; + let branches = vec![create_test_branch("main", true, None, Some(1000))]; + + let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await; + let cx = &mut ctx; + + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + let query = "https://github.com/user/repo.git".to_string(); + picker.delegate.update_matches(query, window, cx) + }) + }) + .await; + + cx.run_until_parked(); + + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_url()); + assert!(matches!(picker.delegate.state, PickerState::NewRemote)); + picker.delegate.confirm(false, window, cx); + assert_eq!(picker.delegate.matches.len(), 0); + if let PickerState::CreateRemote(remote_url) = &picker.delegate.state + && remote_url.as_ref() == "https://github.com/user/repo.git" + { + } else { + panic!("wrong picker state"); + } + picker + .delegate + .update_matches("my_new_remote".to_string(), window, cx) + }) + }) + .await; + + cx.run_until_parked(); + + branch_list.update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + assert_eq!(picker.delegate.matches.len(), 1); + assert!(matches!( + picker.delegate.matches.first(), + Some(Entry::NewRemoteName { name, url }) + if name == "my_new_remote" && url.as_ref() == "https://github.com/user/repo.git" + )); + picker.delegate.confirm(false, window, cx); + }) + }); + cx.run_until_parked(); + + // List remotes + let remotes = branch_list + .update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker + .delegate + .repo + .as_ref() + .unwrap() + .update(cx, |repo, _cx| repo.get_remotes(None, false)) + }) + }) + .await + .unwrap() + .unwrap(); + assert_eq!( + remotes, + vec![Remote { + name: SharedString::from("my_new_remote".to_string()) + }] + ); + } + + #[gpui::test] + async fn test_confirm_remote_url_transitions(cx: &mut TestAppContext) { + init_test(cx); + + let branches = vec![create_test_branch("main_branch", true, None, Some(1000))]; + let (branch_list, mut ctx) = init_branch_list_test(None, branches, cx).await; + let cx = &mut ctx; + + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + let query = "https://github.com/user/repo.git".to_string(); + picker.delegate.update_matches(query, window, cx) + }) + }) + .await; + cx.run_until_parked(); + + // Try to create a new remote but cancel in the middle of the process + branch_list + .update_in(cx, |branch_list, window, cx| { + branch_list.picker.update(cx, |picker, cx| { + picker.delegate.selected_index = picker.delegate.matches.len() - 1; + picker.delegate.confirm(false, window, cx); + + assert!(matches!( + picker.delegate.state, + PickerState::CreateRemote(_) + )); + if let PickerState::CreateRemote(ref url) = picker.delegate.state { + assert_eq!(url.as_ref(), "https://github.com/user/repo.git"); + } + assert_eq!(picker.delegate.matches.len(), 0); + picker.delegate.dismissed(window, cx); + assert!(matches!(picker.delegate.state, PickerState::List)); + let query = "main".to_string(); + picker.delegate.update_matches(query, window, cx) + }) + }) + 
.await; + cx.run_until_parked(); + + // Try to search a branch again to see if the state is restored properly + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + // Should have 1 existing branch + 1 "create new branch" entry = 2 total + assert_eq!(picker.delegate.matches.len(), 2); + assert!( + picker + .delegate + .matches + .iter() + .any(|m| m.name() == "main_branch") + ); + // Verify the last entry is the "create new branch" option + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_branch()); + }) + }); + } + + #[gpui::test] + async fn test_confirm_remote_url_does_not_dismiss(cx: &mut TestAppContext) { + const REMOTE_URL: &str = "https://github.com/user/repo.git"; + + init_test(cx); + let branches = vec![create_test_branch("main", true, None, Some(1000))]; + + let (branch_list, mut ctx) = init_branch_list_test(None, branches, cx).await; + let cx = &mut ctx; + + let subscription = cx.update(|_, cx| { + cx.subscribe(&branch_list, |_, _: &DismissEvent, _| { + panic!("DismissEvent should not be emitted when confirming a remote URL"); + }) + }); + + branch_list + .update_in(cx, |branch_list, window, cx| { + window.focus(&branch_list.picker_focus_handle, cx); + assert!( + branch_list.picker_focus_handle.is_focused(window), + "Branch picker should be focused when selecting an entry" + ); + + branch_list.picker.update(cx, |picker, cx| { + picker + .delegate + .update_matches(REMOTE_URL.to_string(), window, cx) + }) + }) + .await; + + cx.run_until_parked(); + + branch_list.update_in(cx, |branch_list, window, cx| { + // Re-focus the picker since workspace initialization during run_until_parked + window.focus(&branch_list.picker_focus_handle, cx); + + branch_list.picker.update(cx, |picker, cx| { + let last_match = picker.delegate.matches.last().unwrap(); + assert!(last_match.is_new_url()); + assert!(matches!(picker.delegate.state, PickerState::NewRemote)); + + picker.delegate.confirm(false, window, cx); + + assert!( + matches!(picker.delegate.state, PickerState::CreateRemote(ref url) if url.as_ref() == REMOTE_URL), + "State should transition to CreateRemote with the URL" + ); + }); + + assert!( + branch_list.picker_focus_handle.is_focused(window), + "Branch list picker should still be focused after confirming remote URL" + ); + }); + + cx.run_until_parked(); + + drop(subscription); + } + + #[gpui::test(iterations = 10)] + async fn test_empty_query_displays_all_branches(mut rng: StdRng, cx: &mut TestAppContext) { + init_test(cx); + let branch_count = rng.random_range(13..540); + + let branches: Vec = (0..branch_count) + .map(|i| create_test_branch(&format!("branch-{:02}", i), i == 0, None, Some(i * 100))) + .collect(); + + let (branch_list, mut ctx) = init_branch_list_test(None, branches, cx).await; + let cx = &mut ctx; + + update_branch_list_matches_with_empty_query(&branch_list, cx).await; + + branch_list.update(cx, |branch_list, cx| { + branch_list.picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.matches.len(), branch_count as usize); + }) + }); } } diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 45b1563dca0ceed5ed2ac488026fe94084050780..e154933adc794221159c7f1b28b3d1e33cf1854d 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -139,7 +139,7 @@ impl CommitModal { && !git_panel.amend_pending() { git_panel.set_amend_pending(true, cx); - git_panel.load_last_commit_message_if_empty(cx); + git_panel.load_last_commit_message(cx); 
} } ForceMode::Commit => { @@ -337,6 +337,7 @@ impl CommitModal { active_repo, is_amend_pending, is_signoff_enabled, + workspace, ) = self.git_panel.update(cx, |git_panel, cx| { let (can_commit, tooltip) = git_panel.configure_commit_button(cx); let title = git_panel.commit_button_title(); @@ -354,6 +355,7 @@ impl CommitModal { active_repo, is_amend_pending, is_signoff_enabled, + git_panel.workspace.clone(), ) }); @@ -375,7 +377,14 @@ impl CommitModal { .style(ButtonStyle::Transparent); let branch_picker = PopoverMenu::new("popover-button") - .menu(move |window, cx| Some(branch_picker::popover(active_repo.clone(), window, cx))) + .menu(move |window, cx| { + Some(branch_picker::popover( + workspace.clone(), + active_repo.clone(), + window, + cx, + )) + }) .with_handle(self.branch_list_handle.clone()) .trigger_with_tooltip( branch_picker_button, @@ -492,60 +501,27 @@ impl CommitModal { } } - fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context) { - if self.git_panel.read(cx).amend_pending() { - return; + fn on_commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context) { + if self.git_panel.update(cx, |git_panel, cx| { + git_panel.commit(&self.commit_editor.focus_handle(cx), window, cx) + }) { + telemetry::event!("Git Committed", source = "Git Modal"); + cx.emit(DismissEvent); } - telemetry::event!("Git Committed", source = "Git Modal"); - self.git_panel.update(cx, |git_panel, cx| { - git_panel.commit_changes( - CommitOptions { - amend: false, - signoff: git_panel.signoff_enabled(), - }, - window, - cx, - ) - }); - cx.emit(DismissEvent); } - fn amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context) { - if self - .git_panel - .read(cx) - .active_repository - .as_ref() - .and_then(|repo| repo.read(cx).head_commit.as_ref()) - .is_none() - { - return; - } - if !self.git_panel.read(cx).amend_pending() { - self.git_panel.update(cx, |git_panel, cx| { - git_panel.set_amend_pending(true, cx); - git_panel.load_last_commit_message_if_empty(cx); - }); - } else { + fn on_amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context) { + if self.git_panel.update(cx, |git_panel, cx| { + git_panel.amend(&self.commit_editor.focus_handle(cx), window, cx) + }) { telemetry::event!("Git Amended", source = "Git Modal"); - self.git_panel.update(cx, |git_panel, cx| { - git_panel.set_amend_pending(false, cx); - git_panel.commit_changes( - CommitOptions { - amend: true, - signoff: git_panel.signoff_enabled(), - }, - window, - cx, - ); - }); cx.emit(DismissEvent); } } fn toggle_branch_selector(&mut self, window: &mut Window, cx: &mut Context) { if self.branch_list_handle.is_focused(window, cx) { - self.focus_handle(cx).focus(window) + self.focus_handle(cx).focus(window, cx) } else { self.branch_list_handle.toggle(window, cx); } @@ -564,8 +540,8 @@ impl Render for CommitModal { .id("commit-modal") .key_context("GitCommit") .on_action(cx.listener(Self::dismiss)) - .on_action(cx.listener(Self::commit)) - .on_action(cx.listener(Self::amend)) + .on_action(cx.listener(Self::on_commit)) + .on_action(cx.listener(Self::on_amend)) .when(!DisableAiSettings::get_global(cx).disable_ai, |this| { this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| { this.git_panel.update(cx, |panel, cx| { @@ -611,8 +587,8 @@ impl Render for CommitModal { .bg(cx.theme().colors().editor_background) .border_1() .border_color(cx.theme().colors().border_variant) - .on_click(cx.listener(move |_, _: &ClickEvent, window, _cx| { - window.focus(&editor_focus_handle); + 
.on_click(cx.listener(move |_, _: &ClickEvent, window, cx| { + window.focus(&editor_focus_handle, cx); })) .child( div() diff --git a/crates/git_ui/src/commit_tooltip.rs b/crates/git_ui/src/commit_tooltip.rs index 7646d1d64f58c24d112eb929646efec983b8e69b..d18770a704ff31d6dffd705baf44defaaf6d8d4a 100644 --- a/crates/git_ui/src/commit_tooltip.rs +++ b/crates/git_ui/src/commit_tooltip.rs @@ -3,7 +3,7 @@ use editor::hover_markdown_style; use futures::Future; use git::blame::BlameEntry; use git::repository::CommitSummary; -use git::{GitRemote, blame::ParsedCommitMessage}; +use git::{GitRemote, commit::ParsedCommitMessage}; use gpui::{ App, Asset, ClipboardItem, Element, Entity, MouseButton, ParentElement, Render, ScrollHandle, StatefulInteractiveElement, WeakEntity, prelude::*, @@ -29,11 +29,16 @@ pub struct CommitDetails { pub struct CommitAvatar<'a> { sha: &'a SharedString, remote: Option<&'a GitRemote>, + size: Option, } impl<'a> CommitAvatar<'a> { pub fn new(sha: &'a SharedString, remote: Option<&'a GitRemote>) -> Self { - Self { sha, remote } + Self { + sha, + remote, + size: None, + } } pub fn from_commit_details(details: &'a CommitDetails) -> Self { @@ -43,28 +48,37 @@ impl<'a> CommitAvatar<'a> { .message .as_ref() .and_then(|details| details.remote.as_ref()), + size: None, } } -} -impl<'a> CommitAvatar<'a> { - pub fn render(&'a self, window: &mut Window, cx: &mut App) -> Option> { + pub fn size(mut self, size: IconSize) -> Self { + self.size = Some(size); + self + } + + pub fn render(&'a self, window: &mut Window, cx: &mut App) -> AnyElement { + match self.avatar(window, cx) { + // Loading or no avatar found + None => Icon::new(IconName::Person) + .color(Color::Muted) + .when_some(self.size, |this, size| this.size(size)) + .into_any_element(), + // Found + Some(avatar) => avatar + .when_some(self.size, |this, size| this.size(size.rems())) + .into_any_element(), + } + } + + pub fn avatar(&'a self, window: &mut Window, cx: &mut App) -> Option { let remote = self .remote .filter(|remote| remote.host_supports_avatars())?; - let avatar_url = CommitAvatarAsset::new(remote.clone(), self.sha.clone()); - let element = match window.use_asset::(&avatar_url, cx) { - // Loading or no avatar found - None | Some(None) => Icon::new(IconName::Person) - .color(Color::Muted) - .into_element() - .into_any(), - // Found - Some(Some(url)) => Avatar::new(url.to_string()).into_element().into_any(), - }; - Some(element) + let url = window.use_asset::(&avatar_url, cx)??; + Some(Avatar::new(url.to_string())) } } @@ -197,10 +211,7 @@ impl Render for CommitTooltip { time_format::TimestampFormat::MediumAbsolute, ); let markdown_style = { - let mut style = hover_markdown_style(window, cx); - if let Some(code_block) = &style.code_block.text { - style.base_text_style.refine(code_block); - } + let style = hover_markdown_style(window, cx); style }; @@ -256,7 +267,7 @@ impl Render for CommitTooltip { .gap_x_2() .overflow_x_hidden() .flex_wrap() - .children(avatar) + .child(avatar) .child(author) .when(!author_email.is_empty(), |this| { this.child( @@ -323,6 +334,7 @@ impl Render for CommitTooltip { repo.downgrade(), workspace.clone(), None, + None, window, cx, ); diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 765e1f84a4a3a5b7e257e51df9a9542d0abff067..0f5420fec4169f8e3d945dd8bd0987ebbaba8d19 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -1,11 +1,16 @@ use anyhow::{Context as _, Result}; use buffer_diff::{BufferDiff, BufferDiffSnapshot}; -use 
editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_context_lines}; +use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; +use editor::{Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; use git::repository::{CommitDetails, CommitDiff, RepoPath}; +use git::{ + BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, ParsedGitRemote, + parse_git_remote_url, +}; use gpui::{ - Action, AnyElement, AnyView, App, AppContext as _, AsyncApp, AsyncWindowContext, Context, - Entity, EventEmitter, FocusHandle, Focusable, IntoElement, PromptLevel, Render, Task, - WeakEntity, Window, actions, + AnyElement, App, AppContext as _, AsyncApp, AsyncWindowContext, Context, Element, Entity, + EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ParentElement, + PromptLevel, Render, Styled, Task, WeakEntity, Window, actions, }; use language::{ Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, OffsetRangeExt as _, @@ -15,14 +20,13 @@ use multi_buffer::PathKey; use project::{Project, WorktreeId, git_store::Repository}; use std::{ any::{Any, TypeId}, - fmt::Write as _, path::PathBuf, sync::Arc, }; -use ui::{ - Button, Color, Icon, IconName, Label, LabelCommon as _, SharedString, Tooltip, prelude::*, -}; +use theme::ActiveTheme; +use ui::{DiffStat, Tooltip, prelude::*}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; +use workspace::item::TabTooltipContent; use workspace::{ Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, @@ -32,20 +36,21 @@ use workspace::{ searchable::SearchableItemHandle, }; +use crate::commit_tooltip::CommitAvatar; use crate::git_panel::GitPanel; actions!(git, [ApplyCurrentStash, PopCurrentStash, DropCurrentStash,]); pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _window, _cx| { - register_workspace_action(workspace, |toolbar, _: &ApplyCurrentStash, window, cx| { - toolbar.apply_stash(window, cx); + workspace.register_action(|workspace, _: &ApplyCurrentStash, window, cx| { + CommitView::apply_stash(workspace, window, cx); }); - register_workspace_action(workspace, |toolbar, _: &DropCurrentStash, window, cx| { - toolbar.remove_stash(window, cx); + workspace.register_action(|workspace, _: &DropCurrentStash, window, cx| { + CommitView::remove_stash(workspace, window, cx); }); - register_workspace_action(workspace, |toolbar, _: &PopCurrentStash, window, cx| { - toolbar.pop_stash(window, cx); + workspace.register_action(|workspace, _: &PopCurrentStash, window, cx| { + CommitView::pop_stash(workspace, window, cx); }); }) .detach(); @@ -56,20 +61,18 @@ pub struct CommitView { editor: Entity, stash: Option, multibuffer: Entity, + repository: Entity, + remote: Option, } struct GitBlob { path: RepoPath, worktree_id: WorktreeId, is_deleted: bool, + display_name: Arc, } -struct CommitMetadataFile { - title: Arc, - worktree_id: WorktreeId, -} - -const COMMIT_METADATA_SORT_PREFIX: u64 = 0; +const COMMIT_MESSAGE_SORT_PREFIX: u64 = 0; const FILE_NAMESPACE_SORT_PREFIX: u64 = 1; impl CommitView { @@ -78,6 +81,7 @@ impl CommitView { repo: WeakEntity, workspace: WeakEntity, stash: Option, + file_filter: Option, window: &mut Window, cx: &mut App, ) { @@ -91,8 +95,14 @@ impl CommitView { window .spawn(cx, async move |cx| { let (commit_diff, commit_details) = futures::join!(commit_diff?, commit_details?); - let commit_diff = commit_diff.log_err()?.log_err()?; + let mut commit_diff = 
commit_diff.log_err()?.log_err()?; let commit_details = commit_details.log_err()?.log_err()?; + + // Filter to specific file if requested + if let Some(ref filter_path) = file_filter { + commit_diff.files.retain(|f| &f.path == filter_path); + } + let repo = repo.upgrade()?; workspace @@ -140,64 +150,87 @@ impl CommitView { ) -> Self { let language_registry = project.read(cx).languages().clone(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadOnly)); + + let message_buffer = cx.new(|cx| { + let mut buffer = Buffer::local(commit.message.clone(), cx); + buffer.set_capability(Capability::ReadOnly, cx); + buffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + let snapshot = message_buffer.read(cx).snapshot(); + let full_range = Point::zero()..snapshot.max_point(); + let range = ExcerptRange { + context: full_range.clone(), + primary: full_range, + }; + multibuffer.set_excerpt_ranges_for_path( + PathKey::with_sort_prefix( + COMMIT_MESSAGE_SORT_PREFIX, + RelPath::unix("commit message").unwrap().into(), + ), + message_buffer.clone(), + &snapshot, + vec![range], + cx, + ) + }); + let editor = cx.new(|cx| { let mut editor = Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx); + editor.disable_inline_diagnostics(); + editor.set_show_breakpoints(false, cx); editor.set_expand_all_diff_hunks(cx); + editor.disable_header_for_buffer(message_buffer.read(cx).remote_id(), cx); + editor.disable_indent_guides_for_buffer(message_buffer.read(cx).remote_id(), cx); + + editor.insert_blocks( + [BlockProperties { + placement: BlockPlacement::Above(editor::Anchor::min()), + height: Some(1), + style: BlockStyle::Sticky, + render: Arc::new(|_| gpui::Empty.into_any_element()), + priority: 0, + }] + .into_iter() + .chain( + editor + .buffer() + .read(cx) + .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx) + .map(|anchor| BlockProperties { + placement: BlockPlacement::Below(anchor), + height: Some(1), + style: BlockStyle::Sticky, + render: Arc::new(|_| gpui::Empty.into_any_element()), + priority: 0, + }), + ), + None, + cx, + ); + editor }); + let commit_sha = Arc::::from(commit.sha.as_ref()); + let first_worktree_id = project .read(cx) .worktrees(cx) .next() .map(|worktree| worktree.read(cx).id()); - let mut metadata_buffer_id = None; - if let Some(worktree_id) = first_worktree_id { - let title = if let Some(stash) = stash { - format!("stash@{{{}}}", stash) - } else { - format!("commit {}", commit.sha) - }; - let file = Arc::new(CommitMetadataFile { - title: RelPath::unix(&title).unwrap().into(), - worktree_id, - }); - let buffer = cx.new(|cx| { - let buffer = TextBuffer::new_normalized( - ReplicaId::LOCAL, - cx.entity_id().as_non_zero_u64().into(), - LineEnding::default(), - format_commit(&commit, stash.is_some()).into(), - ); - metadata_buffer_id = Some(buffer.remote_id()); - Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite) - }); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::with_sort_prefix(COMMIT_METADATA_SORT_PREFIX, file.title.clone()), - buffer.clone(), - vec![Point::zero()..buffer.read(cx).max_point()], - 0, - cx, - ); - }); - editor.update(cx, |editor, cx| { - editor.disable_header_for_buffer(metadata_buffer_id.unwrap(), cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.select_ranges(vec![0..0]); - }); - }); - } + let repository_clone = repository.clone(); cx.spawn(async move |this, cx| { for file in commit_diff.files { let is_deleted = 
file.new_text.is_none(); let new_text = file.new_text.unwrap_or_default(); let old_text = file.old_text; - let worktree_id = repository + let worktree_id = repository_clone .update(cx, |repository, cx| { repository .repo_path_to_project_path(&file.path, cx) @@ -205,10 +238,20 @@ impl CommitView { .or(first_worktree_id) })? .context("project has no worktrees")?; + let short_sha = commit_sha.get(0..7).unwrap_or(&commit_sha); + let file_name = file + .path + .file_name() + .map(|name| name.to_string()) + .unwrap_or_else(|| file.path.display(PathStyle::Posix).to_string()); + let display_name: Arc = + Arc::from(format!("{short_sha} - {file_name}").into_boxed_str()); + let file = Arc::new(GitBlob { path: file.path.clone(), is_deleted, worktree_id, + display_name, }) as Arc; let buffer = build_buffer(new_text, file, &language_registry, cx).await?; @@ -218,16 +261,22 @@ impl CommitView { this.update(cx, |this, cx| { this.multibuffer.update(cx, |multibuffer, cx| { let snapshot = buffer.read(cx).snapshot(); - let diff = buffer_diff.read(cx); - let diff_hunk_ranges = diff - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) - .collect::>(); let path = snapshot.file().unwrap().path().clone(); + let excerpt_ranges = { + let mut hunks = buffer_diff.read(cx).hunks(&snapshot, cx).peekable(); + if hunks.peek().is_none() { + vec![language::Point::zero()..snapshot.max_point()] + } else { + hunks + .map(|hunk| hunk.buffer_range.to_point(&snapshot)) + .collect::>() + } + }; + let _is_newly_added = multibuffer.set_excerpts_for_path( PathKey::with_sort_prefix(FILE_NAMESPACE_SORT_PREFIX, path), buffer, - diff_hunk_ranges, + excerpt_ranges, multibuffer_context_lines(cx), cx, ); @@ -235,68 +284,388 @@ impl CommitView { }); })?; } + anyhow::Ok(()) }) .detach(); + let snapshot = repository.read(cx).snapshot(); + let remote_url = snapshot + .remote_upstream_url + .as_ref() + .or(snapshot.remote_origin_url.as_ref()); + + let remote = remote_url.and_then(|url| { + let provider_registry = GitHostingProviderRegistry::default_global(cx); + parse_git_remote_url(provider_registry, url).map(|(host, parsed)| GitRemote { + host, + owner: parsed.owner.into(), + repo: parsed.repo.into(), + }) + }); + Self { commit, editor, multibuffer, stash, + repository, + remote, } } -} -impl language::File for GitBlob { - fn as_local(&self) -> Option<&dyn language::LocalFile> { - None + fn render_commit_avatar( + &self, + sha: &SharedString, + size: impl Into, + window: &mut Window, + cx: &mut App, + ) -> AnyElement { + let size = size.into(); + let avatar = CommitAvatar::new(sha, self.remote.as_ref()); + + v_flex() + .w(size) + .h(size) + .border_1() + .border_color(cx.theme().colors().border) + .rounded_full() + .justify_center() + .items_center() + .child( + avatar + .avatar(window, cx) + .map(|a| a.size(size).into_any_element()) + .unwrap_or_else(|| { + Icon::new(IconName::Person) + .color(Color::Muted) + .size(IconSize::Medium) + .into_any_element() + }), + ) + .into_any() } - fn disk_state(&self) -> DiskState { - if self.is_deleted { - DiskState::Deleted - } else { - DiskState::New + fn calculate_changed_lines(&self, cx: &App) -> (u32, u32) { + let snapshot = self.multibuffer.read(cx).snapshot(cx); + let mut total_additions = 0u32; + let mut total_deletions = 0u32; + + let mut seen_buffers = std::collections::HashSet::new(); + for (_, buffer, _) in snapshot.excerpts() { + let buffer_id = buffer.remote_id(); + if !seen_buffers.insert(buffer_id) { + continue; + 
} + + let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else { + continue; + }; + + let base_text = diff.base_text(); + + for hunk in diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) { + let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); + total_additions += added_rows; + + let base_start = base_text + .offset_to_point(hunk.diff_base_byte_range.start) + .row; + let base_end = base_text.offset_to_point(hunk.diff_base_byte_range.end).row; + let deleted_rows = base_end.saturating_sub(base_start); + + total_deletions += deleted_rows; + } } - } - fn path_style(&self, _: &App) -> PathStyle { - PathStyle::Posix + (total_additions, total_deletions) } - fn path(&self) -> &Arc { - self.path.as_ref() + fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let commit = &self.commit; + let author_name = commit.author_name.clone(); + let commit_date = time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp) + .unwrap_or_else(|_| time::OffsetDateTime::now_utc()); + let local_offset = time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC); + let date_string = time_format::format_localized_timestamp( + commit_date, + time::OffsetDateTime::now_utc(), + local_offset, + time_format::TimestampFormat::MediumAbsolute, + ); + + let remote_info = self.remote.as_ref().map(|remote| { + let provider = remote.host.name(); + let parsed_remote = ParsedGitRemote { + owner: remote.owner.as_ref().into(), + repo: remote.repo.as_ref().into(), + }; + let params = BuildCommitPermalinkParams { sha: &commit.sha }; + let url = remote + .host + .build_commit_permalink(&parsed_remote, params) + .to_string(); + (provider, url) + }); + + let (additions, deletions) = self.calculate_changed_lines(cx); + + let commit_diff_stat = if additions > 0 || deletions > 0 { + Some(DiffStat::new( + "commit-diff-stat", + additions as usize, + deletions as usize, + )) + } else { + None + }; + + let gutter_width = self.editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let style = editor.style(cx); + let font_id = window.text_system().resolve_font(&style.text.font()); + let font_size = style.text.font_size.to_pixels(window.rem_size()); + snapshot + .gutter_dimensions(font_id, font_size, style, window, cx) + .full_width() + }); + + h_flex() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .w_full() + .child( + h_flex() + .w(gutter_width) + .justify_center() + .child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)), + ) + .child( + h_flex() + .py_4() + .pl_1() + .pr_4() + .w_full() + .items_start() + .justify_between() + .flex_wrap() + .child( + v_flex() + .child( + h_flex() + .gap_1() + .child(Label::new(author_name).color(Color::Default)) + .child( + Label::new(format!("Commit:{}", commit.sha)) + .color(Color::Muted) + .size(LabelSize::Small) + .truncate() + .buffer_font(cx), + ), + ) + .child( + h_flex() + .gap_1p5() + .child( + Label::new(date_string) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child( + Label::new("•") + .color(Color::Ignored) + .size(LabelSize::Small), + ) + .children(commit_diff_stat), + ), + ) + .children(remote_info.map(|(provider_name, url)| { + let icon = match provider_name.as_str() { + "GitHub" => IconName::Github, + _ => IconName::Link, + }; + + Button::new("view_on_provider", format!("View on {}", provider_name)) + .icon(icon) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + 
.on_click(move |_, _, cx| cx.open_url(&url)) + })), + ) } - fn full_path(&self, _: &App) -> PathBuf { - self.path.as_std_path().to_path_buf() + fn apply_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) { + Self::stash_action( + workspace, + "Apply", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, not applying")); + } + Ok(repo.stash_apply(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await?, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); } - fn file_name<'a>(&'a self, _: &'a App) -> &'a str { - self.path.file_name().unwrap() + fn pop_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) { + Self::stash_action( + workspace, + "Pop", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, pop aborted")); + } + Ok(repo.stash_pop(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await?, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); } - fn worktree_id(&self, _: &App) -> WorktreeId { - self.worktree_id + fn remove_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) { + Self::stash_action( + workspace, + "Drop", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, drop aborted")); + } + Ok(repo.stash_drop(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await??, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); } - fn to_proto(&self, _cx: &App) -> language::proto::File { - unimplemented!() + fn stash_action( + workspace: &mut Workspace, + str_action: &str, + window: &mut Window, + cx: &mut App, + callback: AsyncFn, + ) where + AsyncFn: AsyncFnOnce( + Entity, + &SharedString, + usize, + Entity, + WeakEntity, + &mut AsyncWindowContext, + ) -> anyhow::Result<()> + + 'static, + { + let Some(commit_view) = workspace.active_item_as::(cx) else { + return; + }; + let Some(stash) = commit_view.read(cx).stash else { + return; + }; + let sha = commit_view.read(cx).commit.sha.clone(); + let answer = window.prompt( + PromptLevel::Info, + &format!("{} stash@{{{}}}?", str_action, stash), + None, + &[str_action, "Cancel"], + cx, + ); + + let workspace_weak = workspace.weak_handle(); + let commit_view_entity = commit_view; + + window + .spawn(cx, async move |cx| { + if answer.await != Ok(0) { + return anyhow::Ok(()); + } + + let Some(workspace) = workspace_weak.upgrade() else { + return Ok(()); + }; + + let repo = workspace.update(cx, |workspace, cx| { + workspace + .panel::(cx) + .and_then(|p| p.read(cx).active_repository.clone()) + })?; + + let Some(repo) = repo else { + return Ok(()); + }; + + callback(repo, &sha, stash, commit_view_entity, workspace_weak, 
cx).await?; + anyhow::Ok(()) + }) + .detach_and_notify_err(window, cx); } - fn is_private(&self) -> bool { - false + async fn close_commit_view( + commit_view: Entity, + workspace: WeakEntity, + cx: &mut AsyncWindowContext, + ) -> anyhow::Result<()> { + workspace + .update_in(cx, |workspace, window, cx| { + let active_pane = workspace.active_pane(); + let commit_view_id = commit_view.entity_id(); + active_pane.update(cx, |pane, cx| { + pane.close_item_by_id(commit_view_id, SaveIntent::Skip, window, cx) + }) + })? + .await?; + anyhow::Ok(()) } } -impl language::File for CommitMetadataFile { +impl language::File for GitBlob { fn as_local(&self) -> Option<&dyn language::LocalFile> { None } fn disk_state(&self) -> DiskState { - DiskState::New + if self.is_deleted { + DiskState::Deleted + } else { + DiskState::New + } } fn path_style(&self, _: &App) -> PathStyle { @@ -304,22 +673,22 @@ impl language::File for CommitMetadataFile { } fn path(&self) -> &Arc { - &self.title + self.path.as_ref() } fn full_path(&self, _: &App) -> PathBuf { - PathBuf::from(self.title.as_unix_str().to_owned()) + self.path.as_std_path().to_path_buf() } fn file_name<'a>(&'a self, _: &'a App) -> &'a str { - self.title.file_name().unwrap() + self.display_name.as_ref() } fn worktree_id(&self, _: &App) -> WorktreeId { self.worktree_id } - fn to_proto(&self, _: &App) -> language::proto::File { + fn to_proto(&self, _cx: &App) -> language::proto::File { unimplemented!() } @@ -328,6 +697,45 @@ impl language::File for CommitMetadataFile { } } +// No longer needed since metadata buffer is not created +// impl language::File for CommitMetadataFile { +// fn as_local(&self) -> Option<&dyn language::LocalFile> { +// None +// } +// +// fn disk_state(&self) -> DiskState { +// DiskState::New +// } +// +// fn path_style(&self, _: &App) -> PathStyle { +// PathStyle::Posix +// } +// +// fn path(&self) -> &Arc { +// &self.title +// } +// +// fn full_path(&self, _: &App) -> PathBuf { +// self.title.as_std_path().to_path_buf() +// } +// +// fn file_name<'a>(&'a self, _: &'a App) -> &'a str { +// self.title.file_name().unwrap_or("commit") +// } +// +// fn worktree_id(&self, _: &App) -> WorktreeId { +// self.worktree_id +// } +// +// fn to_proto(&self, _cx: &App) -> language::proto::File { +// unimplemented!() +// } +// +// fn is_private(&self) -> bool { +// false +// } +// } + async fn build_buffer( mut text: String, blob: Arc, @@ -355,7 +763,7 @@ async fn build_buffer( text, ); let mut buffer = Buffer::build(buffer, Some(blob), Capability::ReadWrite); - buffer.set_language(language, cx); + buffer.set_language_async(language, cx); buffer })?; Ok(buffer) @@ -402,45 +810,6 @@ async fn build_buffer_diff( }) } -fn format_commit(commit: &CommitDetails, is_stash: bool) -> String { - let mut result = String::new(); - if is_stash { - writeln!(&mut result, "stash commit {}", commit.sha).unwrap(); - } else { - writeln!(&mut result, "commit {}", commit.sha).unwrap(); - } - writeln!( - &mut result, - "Author: {} <{}>", - commit.author_name, commit.author_email - ) - .unwrap(); - let local_offset = time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC); - writeln!( - &mut result, - "Date: {}", - time_format::format_localized_timestamp( - time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp).unwrap(), - time::OffsetDateTime::now_utc(), - local_offset, - time_format::TimestampFormat::MediumAbsolute, - ), - ) - .unwrap(); - result.push('\n'); - for line in commit.message.split('\n') { - if line.is_empty() { - result.push('\n'); - 
} else { - writeln!(&mut result, " {}", line).unwrap(); - } - } - if result.ends_with("\n\n") { - result.pop(); - } - result -} - impl EventEmitter for CommitView {} impl Focusable for CommitView { @@ -469,13 +838,28 @@ impl Item for CommitView { fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { let short_sha = self.commit.sha.get(0..7).unwrap_or(&*self.commit.sha); let subject = truncate_and_trailoff(self.commit.message.split('\n').next().unwrap(), 20); - format!("{short_sha} - {subject}").into() + format!("{short_sha} — {subject}").into() } - fn tab_tooltip_text(&self, _: &App) -> Option { + fn tab_tooltip_content(&self, _: &App) -> Option { let short_sha = self.commit.sha.get(0..16).unwrap_or(&*self.commit.sha); let subject = self.commit.message.split('\n').next().unwrap(); - Some(format!("{short_sha} - {subject}").into()) + + Some(TabTooltipContent::Custom(Box::new(Tooltip::element({ + let subject = subject.to_string(); + let short_sha = short_sha.to_string(); + + move |_, _| { + v_flex() + .child(Label::new(subject.clone())) + .child( + Label::new(short_sha.clone()) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element() + } + })))) } fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { @@ -496,17 +880,17 @@ impl Item for CommitView { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } @@ -540,11 +924,11 @@ impl Item for CommitView { } fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { - ToolbarItemLocation::PrimaryLeft + ToolbarItemLocation::Hidden } - fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { - self.editor.breadcrumbs(theme, cx) + fn breadcrumbs(&self, _theme: &theme::Theme, _cx: &App) -> Option> { + None } fn added_to_workspace( @@ -582,192 +966,46 @@ impl Item for CommitView { multibuffer, commit: self.commit.clone(), stash: self.stash, + repository: self.repository.clone(), + remote: self.remote.clone(), } }))) } } impl Render for CommitView { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_stash = self.stash.is_some(); - div() + + v_flex() .key_context(if is_stash { "StashDiff" } else { "CommitDiff" }) - .bg(cx.theme().colors().editor_background) - .flex() - .items_center() - .justify_center() .size_full() - .child(self.editor.clone()) + .bg(cx.theme().colors().editor_background) + .child(self.render_header(window, cx)) + .when(!self.editor.read(cx).is_empty(cx), |this| { + this.child(div().flex_grow().child(self.editor.clone())) + }) } } pub struct CommitViewToolbar { commit_view: Option>, - workspace: WeakEntity, } impl CommitViewToolbar { - pub fn new(workspace: &Workspace, _: &mut Context) -> Self { - Self { - commit_view: None, - workspace: workspace.weak_handle(), - } - } - - fn commit_view(&self, _: &App) -> Option> { - self.commit_view.as_ref()?.upgrade() + pub fn new() -> Self { + Self { commit_view: None } } +} - async fn close_commit_view( - commit_view: Entity, - workspace: WeakEntity, - cx: &mut AsyncWindowContext, - ) -> anyhow::Result<()> { - workspace - 
.update_in(cx, |workspace, window, cx| { - let active_pane = workspace.active_pane(); - let commit_view_id = commit_view.entity_id(); - active_pane.update(cx, |pane, cx| { - pane.close_item_by_id(commit_view_id, SaveIntent::Skip, window, cx) - }) - })? - .await?; - anyhow::Ok(()) - } - - fn apply_stash(&mut self, window: &mut Window, cx: &mut Context) { - self.stash_action( - "Apply", - window, - cx, - async move |repository, sha, stash, commit_view, workspace, cx| { - let result = repository.update(cx, |repo, cx| { - if !stash_matches_index(&sha, stash, repo) { - return Err(anyhow::anyhow!("Stash has changed, not applying")); - } - Ok(repo.stash_apply(Some(stash), cx)) - })?; - - match result { - Ok(task) => task.await?, - Err(err) => { - Self::close_commit_view(commit_view, workspace, cx).await?; - return Err(err); - } - }; - Self::close_commit_view(commit_view, workspace, cx).await?; - anyhow::Ok(()) - }, - ); - } - - fn pop_stash(&mut self, window: &mut Window, cx: &mut Context) { - self.stash_action( - "Pop", - window, - cx, - async move |repository, sha, stash, commit_view, workspace, cx| { - let result = repository.update(cx, |repo, cx| { - if !stash_matches_index(&sha, stash, repo) { - return Err(anyhow::anyhow!("Stash has changed, pop aborted")); - } - Ok(repo.stash_pop(Some(stash), cx)) - })?; - - match result { - Ok(task) => task.await?, - Err(err) => { - Self::close_commit_view(commit_view, workspace, cx).await?; - return Err(err); - } - }; - Self::close_commit_view(commit_view, workspace, cx).await?; - anyhow::Ok(()) - }, - ); - } - - fn remove_stash(&mut self, window: &mut Window, cx: &mut Context) { - self.stash_action( - "Drop", - window, - cx, - async move |repository, sha, stash, commit_view, workspace, cx| { - let result = repository.update(cx, |repo, cx| { - if !stash_matches_index(&sha, stash, repo) { - return Err(anyhow::anyhow!("Stash has changed, drop aborted")); - } - Ok(repo.stash_drop(Some(stash), cx)) - })?; - - match result { - Ok(task) => task.await??, - Err(err) => { - Self::close_commit_view(commit_view, workspace, cx).await?; - return Err(err); - } - }; - Self::close_commit_view(commit_view, workspace, cx).await?; - anyhow::Ok(()) - }, - ); - } - - fn stash_action( - &mut self, - str_action: &str, - window: &mut Window, - cx: &mut Context, - callback: AsyncFn, - ) where - AsyncFn: AsyncFnOnce( - Entity, - &SharedString, - usize, - Entity, - WeakEntity, - &mut AsyncWindowContext, - ) -> anyhow::Result<()> - + 'static, - { - let Some(commit_view) = self.commit_view(cx) else { - return; - }; - let Some(stash) = commit_view.read(cx).stash else { - return; - }; - let sha = commit_view.read(cx).commit.sha.clone(); - let answer = window.prompt( - PromptLevel::Info, - &format!("{} stash@{{{}}}?", str_action, stash), - None, - &[str_action, "Cancel"], - cx, - ); - - let workspace = self.workspace.clone(); - cx.spawn_in(window, async move |_, cx| { - if answer.await != Ok(0) { - return anyhow::Ok(()); - } - let repo = workspace.update(cx, |workspace, cx| { - workspace - .panel::(cx) - .and_then(|p| p.read(cx).active_repository.clone()) - })?; +impl EventEmitter for CommitViewToolbar {} - let Some(repo) = repo else { - return Ok(()); - }; - callback(repo, &sha, stash, commit_view, workspace, cx).await?; - anyhow::Ok(()) - }) - .detach_and_notify_err(window, cx); +impl Render for CommitViewToolbar { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div().hidden() } } -impl EventEmitter for CommitViewToolbar {} - impl 
ToolbarItemView for CommitViewToolbar { fn set_active_pane_item( &mut self, @@ -793,84 +1031,10 @@ impl ToolbarItemView for CommitViewToolbar { } } -impl Render for CommitViewToolbar { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let Some(commit_view) = self.commit_view(cx) else { - return div(); - }; - - let is_stash = commit_view.read(cx).stash.is_some(); - if !is_stash { - return div(); - } - - let focus_handle = commit_view.focus_handle(cx); - - h_group_xl().my_neg_1().py_1().items_center().child( - h_group_sm() - .child( - Button::new("apply-stash", "Apply") - .tooltip(Tooltip::for_action_title_in( - "Apply current stash", - &ApplyCurrentStash, - &focus_handle, - )) - .on_click(cx.listener(|this, _, window, cx| this.apply_stash(window, cx))), - ) - .child( - Button::new("pop-stash", "Pop") - .tooltip(Tooltip::for_action_title_in( - "Pop current stash", - &PopCurrentStash, - &focus_handle, - )) - .on_click(cx.listener(|this, _, window, cx| this.pop_stash(window, cx))), - ) - .child( - Button::new("remove-stash", "Remove") - .icon(IconName::Trash) - .tooltip(Tooltip::for_action_title_in( - "Remove current stash", - &DropCurrentStash, - &focus_handle, - )) - .on_click(cx.listener(|this, _, window, cx| this.remove_stash(window, cx))), - ), - ) - } -} - -fn register_workspace_action( - workspace: &mut Workspace, - callback: fn(&mut CommitViewToolbar, &A, &mut Window, &mut Context), -) { - workspace.register_action(move |workspace, action: &A, window, cx| { - if workspace.has_active_modal(window, cx) { - cx.propagate(); - return; - } - - workspace.active_pane().update(cx, |pane, cx| { - pane.toolbar().update(cx, move |workspace, cx| { - if let Some(toolbar) = workspace.item_of_type::() { - toolbar.update(cx, move |toolbar, cx| { - callback(toolbar, action, window, cx); - cx.notify(); - }); - } - }); - }) - }); -} - -fn stash_matches_index(sha: &str, index: usize, repo: &mut Repository) -> bool { - match repo - .cached_stash() +fn stash_matches_index(sha: &str, stash_index: usize, repo: &Repository) -> bool { + repo.stash_entries .entries - .iter() - .find(|entry| entry.index == index) - { - Some(entry) => entry.oid.to_string() == sha, - None => false, - } + .get(stash_index) + .map(|entry| entry.oid.to_string() == sha) + .unwrap_or(false) } diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 91cc3ce76b3f10aa310185b566b6c6086580b69c..813e63ab8c96e736cf0cc126526a683b418c2137 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -111,6 +111,7 @@ fn excerpt_for_buffer_updated( ); } +#[ztracing::instrument(skip_all)] fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context) { let Some(project) = editor.project() else { return; @@ -166,6 +167,7 @@ fn buffers_removed(editor: &mut Editor, removed_buffer_ids: &[BufferId], cx: &mu editor.remove_blocks(removed_block_ids, None, cx); } +#[ztracing::instrument(skip_all)] fn conflicts_updated( editor: &mut Editor, conflict_set: Entity, @@ -311,6 +313,7 @@ fn conflicts_updated( } } +#[ztracing::instrument(skip_all)] fn update_conflict_highlighting( editor: &mut Editor, conflict: &ConflictRegion, @@ -372,7 +375,7 @@ fn render_conflict_buttons( .gap_1() .bg(cx.theme().colors().editor_background) .child( - Button::new("head", "Use HEAD") + Button::new("head", format!("Use {}", conflict.ours_branch_name)) .label_size(LabelSize::Small) .on_click({ let editor = editor.clone(); @@ -392,7 +395,7 @@ fn render_conflict_buttons( }), ) .child( 
- Button::new("origin", "Use Origin") + Button::new("origin", format!("Use {}", conflict.theirs_branch_name)) .label_size(LabelSize::Small) .on_click({ let editor = editor.clone(); diff --git a/crates/git_ui/src/file_diff_view.rs b/crates/git_ui/src/file_diff_view.rs index 815eaf871ef17f055e60df34cff3e9b9741fb3fb..b020d7a9f3ac083f1a5adf15ca298b55063a3eb8 100644 --- a/crates/git_ui/src/file_diff_view.rs +++ b/crates/git_ui/src/file_diff_view.rs @@ -5,8 +5,8 @@ use buffer_diff::{BufferDiff, BufferDiffSnapshot}; use editor::{Editor, EditorEvent, MultiBuffer}; use futures::{FutureExt, select_biased}; use gpui::{ - AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, - FocusHandle, Focusable, IntoElement, Render, Task, Window, + AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle, + Focusable, IntoElement, Render, Task, Window, }; use language::Buffer; use project::Project; @@ -108,7 +108,7 @@ impl FileDiffView { for buffer in [&old_buffer, &new_buffer] { cx.subscribe(buffer, move |this, _, event, _| match event { language::BufferEvent::Edited - | language::BufferEvent::LanguageChanged + | language::BufferEvent::LanguageChanged(_) | language::BufferEvent::Reparsed => { this.buffer_changes_tx.send(()).ok(); } @@ -268,17 +268,17 @@ impl Item for FileDiffView { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.clone().into()) } else { None } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.editor.clone())) } diff --git a/crates/git_ui/src/file_history_view.rs b/crates/git_ui/src/file_history_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..f48160719ba5d9b00b8961b75e9ea402c80dd06a --- /dev/null +++ b/crates/git_ui/src/file_history_view.rs @@ -0,0 +1,669 @@ +use anyhow::Result; +use futures::Future; +use git::repository::{FileHistory, FileHistoryEntry, RepoPath}; +use git::{GitHostingProviderRegistry, GitRemote, parse_git_remote_url}; +use gpui::{ + AnyElement, AnyEntity, App, Asset, Context, Entity, EventEmitter, FocusHandle, Focusable, + IntoElement, Render, ScrollStrategy, Task, UniformListScrollHandle, WeakEntity, Window, + actions, uniform_list, +}; +use project::{ + Project, ProjectPath, + git_store::{GitStore, Repository}, +}; +use std::any::{Any, TypeId}; + +use time::OffsetDateTime; +use ui::{Avatar, Chip, Divider, ListItem, WithScrollbar, prelude::*}; +use util::ResultExt; +use workspace::{ + Item, Workspace, + item::{ItemEvent, SaveOptions}, +}; + +use crate::commit_view::CommitView; + +actions!(git, [ViewCommitFromHistory, LoadMoreHistory]); + +pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _window, _cx| { + workspace.register_action(|_workspace, _: &ViewCommitFromHistory, _window, _cx| {}); + workspace.register_action(|_workspace, _: &LoadMoreHistory, _window, _cx| {}); + }) + .detach(); +} + +const PAGE_SIZE: usize = 50; + +pub struct FileHistoryView { + history: FileHistory, + repository: WeakEntity, + git_store: WeakEntity, + workspace: WeakEntity, + remote: Option, + selected_entry: Option, + scroll_handle: UniformListScrollHandle, + focus_handle: FocusHandle, + loading_more: bool, + has_more: bool, +} + +impl FileHistoryView { + pub fn open( + path: RepoPath, + 
git_store: WeakEntity<GitStore>,
+        repo: WeakEntity<Repository>,
+        workspace: WeakEntity<Workspace>,
+        window: &mut Window,
+        cx: &mut App,
+    ) {
+        let file_history_task = git_store
+            .update(cx, |git_store, cx| {
+                repo.upgrade().map(|repo| {
+                    git_store.file_history_paginated(&repo, path.clone(), 0, Some(PAGE_SIZE), cx)
+                })
+            })
+            .ok()
+            .flatten();
+
+        window
+            .spawn(cx, async move |cx| {
+                let file_history = file_history_task?.await.log_err()?;
+                let repo = repo.upgrade()?;
+
+                workspace
+                    .update_in(cx, |workspace, window, cx| {
+                        let project = workspace.project();
+                        let view = cx.new(|cx| {
+                            FileHistoryView::new(
+                                file_history,
+                                git_store.clone(),
+                                repo.clone(),
+                                workspace.weak_handle(),
+                                project.clone(),
+                                window,
+                                cx,
+                            )
+                        });
+
+                        let pane = workspace.active_pane();
+                        pane.update(cx, |pane, cx| {
+                            let ix = pane.items().position(|item| {
+                                let view = item.downcast::<FileHistoryView>();
+                                view.is_some_and(|v| v.read(cx).history.path == path)
+                            });
+                            if let Some(ix) = ix {
+                                pane.activate_item(ix, true, true, window, cx);
+                            } else {
+                                pane.add_item(Box::new(view), true, true, None, window, cx);
+                            }
+                        })
+                    })
+                    .log_err()
+            })
+            .detach();
+    }
+
+    fn new(
+        history: FileHistory,
+        git_store: WeakEntity<GitStore>,
+        repository: Entity<Repository>,
+        workspace: WeakEntity<Workspace>,
+        _project: Entity<Project>,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Self {
+        let focus_handle = cx.focus_handle();
+        let scroll_handle = UniformListScrollHandle::new();
+        let has_more = history.entries.len() >= PAGE_SIZE;
+
+        let snapshot = repository.read(cx).snapshot();
+        let remote_url = snapshot
+            .remote_upstream_url
+            .as_ref()
+            .or(snapshot.remote_origin_url.as_ref());
+
+        let remote = remote_url.and_then(|url| {
+            let provider_registry = GitHostingProviderRegistry::default_global(cx);
+            parse_git_remote_url(provider_registry, url).map(|(host, parsed)| GitRemote {
+                host,
+                owner: parsed.owner.into(),
+                repo: parsed.repo.into(),
+            })
+        });
+
+        Self {
+            history,
+            git_store,
+            repository: repository.downgrade(),
+            workspace,
+            remote,
+            selected_entry: None,
+            scroll_handle,
+            focus_handle,
+            loading_more: false,
+            has_more,
+        }
+    }
+
+    fn load_more(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+        if self.loading_more || !self.has_more {
+            return;
+        }
+
+        self.loading_more = true;
+        cx.notify();
+
+        let current_count = self.history.entries.len();
+        let path = self.history.path.clone();
+        let git_store = self.git_store.clone();
+        let repo = self.repository.clone();
+
+        let this = cx.weak_entity();
+        let task = window.spawn(cx, async move |cx| {
+            let file_history_task = git_store
+                .update(cx, |git_store, cx| {
+                    repo.upgrade().map(|repo| {
+                        git_store.file_history_paginated(
+                            &repo,
+                            path,
+                            current_count,
+                            Some(PAGE_SIZE),
+                            cx,
+                        )
+                    })
+                })
+                .ok()
+                .flatten();
+
+            if let Some(task) = file_history_task {
+                if let Ok(more_history) = task.await {
+                    this.update(cx, |this, cx| {
+                        this.loading_more = false;
+                        this.has_more = more_history.entries.len() >= PAGE_SIZE;
+                        this.history.entries.extend(more_history.entries);
+                        cx.notify();
+                    })
+                    .ok();
+                }
+            }
+        });
+
+        task.detach();
+    }
+
+    fn select_next(&mut self, _: &menu::SelectNext, _: &mut Window, cx: &mut Context<Self>) {
+        let entry_count = self.history.entries.len();
+        let ix = match self.selected_entry {
+            _ if entry_count == 0 => None,
+            None => Some(0),
+            Some(ix) => {
+                if ix == entry_count - 1 {
+                    Some(0)
+                } else {
+                    Some(ix + 1)
+                }
+            }
+        };
+        self.select_ix(ix, cx);
+    }
+
+    fn select_previous(
+        &mut self,
+        _: &menu::SelectPrevious,
+        _: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let entry_count =
self.history.entries.len(); + let ix = match self.selected_entry { + _ if entry_count == 0 => None, + None => Some(entry_count - 1), + Some(ix) => { + if ix == 0 { + Some(entry_count - 1) + } else { + Some(ix - 1) + } + } + }; + self.select_ix(ix, cx); + } + + fn select_first(&mut self, _: &menu::SelectFirst, _: &mut Window, cx: &mut Context) { + let entry_count = self.history.entries.len(); + let ix = if entry_count != 0 { Some(0) } else { None }; + self.select_ix(ix, cx); + } + + fn select_last(&mut self, _: &menu::SelectLast, _: &mut Window, cx: &mut Context) { + let entry_count = self.history.entries.len(); + let ix = if entry_count != 0 { + Some(entry_count - 1) + } else { + None + }; + self.select_ix(ix, cx); + } + + fn select_ix(&mut self, ix: Option, cx: &mut Context) { + self.selected_entry = ix; + if let Some(ix) = ix { + self.scroll_handle.scroll_to_item(ix, ScrollStrategy::Top); + } + cx.notify(); + } + + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + self.open_commit_view(window, cx); + } + + fn open_commit_view(&mut self, window: &mut Window, cx: &mut Context) { + let Some(entry) = self + .selected_entry + .and_then(|ix| self.history.entries.get(ix)) + else { + return; + }; + + if let Some(repo) = self.repository.upgrade() { + let sha_str = entry.sha.to_string(); + CommitView::open( + sha_str, + repo.downgrade(), + self.workspace.clone(), + None, + Some(self.history.path.clone()), + window, + cx, + ); + } + } + + fn render_commit_avatar( + &self, + sha: &SharedString, + window: &mut Window, + cx: &mut App, + ) -> impl IntoElement { + let remote = self.remote.as_ref().filter(|r| r.host_supports_avatars()); + let size = rems_from_px(20.); + + if let Some(remote) = remote { + let avatar_asset = CommitAvatarAsset::new(remote.clone(), sha.clone()); + if let Some(Some(url)) = window.use_asset::(&avatar_asset, cx) { + Avatar::new(url.to_string()).size(size) + } else { + Avatar::new("").size(size) + } + } else { + Avatar::new("").size(size) + } + } + + fn render_commit_entry( + &self, + ix: usize, + entry: &FileHistoryEntry, + window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let pr_number = entry + .subject + .rfind("(#") + .and_then(|start| { + let rest = &entry.subject[start + 2..]; + rest.find(')') + .and_then(|end| rest[..end].parse::().ok()) + }) + .map(|num| format!("#{}", num)) + .unwrap_or_else(|| { + if entry.sha.len() >= 7 { + entry.sha[..7].to_string() + } else { + entry.sha.to_string() + } + }); + + let commit_time = OffsetDateTime::from_unix_timestamp(entry.commit_timestamp) + .unwrap_or_else(|_| OffsetDateTime::UNIX_EPOCH); + let relative_timestamp = time_format::format_localized_timestamp( + commit_time, + OffsetDateTime::now_utc(), + time::UtcOffset::current_local_offset().unwrap_or(time::UtcOffset::UTC), + time_format::TimestampFormat::Relative, + ); + + ListItem::new(("commit", ix)) + .toggle_state(Some(ix) == self.selected_entry) + .child( + h_flex() + .h_8() + .w_full() + .pl_0p5() + .pr_2p5() + .gap_2() + .child( + div() + .w(rems_from_px(52.)) + .flex_none() + .child(Chip::new(pr_number)), + ) + .child(self.render_commit_avatar(&entry.sha, window, cx)) + .child( + h_flex() + .min_w_0() + .w_full() + .justify_between() + .child( + h_flex() + .min_w_0() + .w_full() + .gap_1() + .child( + Label::new(entry.author_name.clone()) + .size(LabelSize::Small) + .color(Color::Default) + .truncate(), + ) + .child( + Label::new(&entry.subject) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), + ), + ) + .child( 
+ h_flex().flex_none().child( + Label::new(relative_timestamp) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ), + ), + ) + .on_click(cx.listener(move |this, _, window, cx| { + this.selected_entry = Some(ix); + cx.notify(); + + this.open_commit_view(window, cx); + })) + .into_any_element() + } +} + +#[derive(Clone, Debug)] +struct CommitAvatarAsset { + sha: SharedString, + remote: GitRemote, +} + +impl std::hash::Hash for CommitAvatarAsset { + fn hash(&self, state: &mut H) { + self.sha.hash(state); + self.remote.host.name().hash(state); + } +} + +impl CommitAvatarAsset { + fn new(remote: GitRemote, sha: SharedString) -> Self { + Self { remote, sha } + } +} + +impl Asset for CommitAvatarAsset { + type Source = Self; + type Output = Option; + + fn load( + source: Self::Source, + cx: &mut App, + ) -> impl Future + Send + 'static { + let client = cx.http_client(); + async move { + match source + .remote + .host + .commit_author_avatar_url( + &source.remote.owner, + &source.remote.repo, + source.sha.clone(), + client, + ) + .await + { + Ok(Some(url)) => Some(SharedString::from(url.to_string())), + Ok(None) => None, + Err(_) => None, + } + } + } +} + +impl EventEmitter for FileHistoryView {} + +impl Focusable for FileHistoryView { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for FileHistoryView { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let _file_name = self.history.path.file_name().unwrap_or("File"); + let entry_count = self.history.entries.len(); + + v_flex() + .id("file_history_view") + .key_context("FileHistoryView") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .size_full() + .bg(cx.theme().colors().editor_background) + .child( + h_flex() + .h(rems_from_px(41.)) + .pl_3() + .pr_2() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child( + Label::new(self.history.path.as_unix_str().to_string()) + .color(Color::Muted) + .buffer_font(cx), + ) + .child( + h_flex() + .gap_1p5() + .child( + Label::new(format!("{} commits", entry_count)) + .size(LabelSize::Small) + .color(Color::Muted) + .when(self.has_more, |this| this.mr_1()), + ) + .when(self.has_more, |this| { + this.child(Divider::vertical()).child( + Button::new("load-more", "Load More") + .disabled(self.loading_more) + .label_size(LabelSize::Small) + .icon(IconName::ArrowCircle) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .on_click(cx.listener(|this, _, window, cx| { + this.load_more(window, cx); + })), + ) + }), + ), + ) + .child( + v_flex() + .flex_1() + .size_full() + .child({ + let view = cx.weak_entity(); + uniform_list( + "file-history-list", + entry_count, + move |range, window, cx| { + let Some(view) = view.upgrade() else { + return Vec::new(); + }; + view.update(cx, |this, cx| { + let mut items = Vec::with_capacity(range.end - range.start); + for ix in range { + if let Some(entry) = this.history.entries.get(ix) { + items.push( + this.render_commit_entry(ix, entry, window, cx), + ); + } + } + items + }) + }, + ) + .flex_1() + .size_full() + .track_scroll(&self.scroll_handle) + }) + .vertical_scrollbar_for(&self.scroll_handle, window, cx), + ) + } +} + +impl Item for FileHistoryView { + type Event = ItemEvent; + + 
fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) { + f(*event) + } + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + let file_name = self + .history + .path + .file_name() + .map(|name| name.to_string()) + .unwrap_or_else(|| "File".to_string()); + format!("History: {}", file_name).into() + } + + fn tab_tooltip_text(&self, _cx: &App) -> Option { + Some(format!("Git history for {}", self.history.path.as_unix_str()).into()) + } + + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(Icon::new(IconName::GitBranch)) + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("file history") + } + + fn clone_on_split( + &self, + _workspace_id: Option, + _window: &mut Window, + _cx: &mut Context, + ) -> Task>> { + Task::ready(None) + } + + fn navigate(&mut self, _: Box, _window: &mut Window, _: &mut Context) -> bool { + false + } + + fn deactivated(&mut self, _window: &mut Window, _: &mut Context) {} + + fn can_save(&self, _: &App) -> bool { + false + } + + fn save( + &mut self, + _options: SaveOptions, + _project: Entity, + _window: &mut Window, + _: &mut Context, + ) -> Task> { + Task::ready(Ok(())) + } + + fn save_as( + &mut self, + _project: Entity, + _path: ProjectPath, + _window: &mut Window, + _: &mut Context, + ) -> Task> { + Task::ready(Ok(())) + } + + fn reload( + &mut self, + _project: Entity, + _window: &mut Window, + _: &mut Context, + ) -> Task> { + Task::ready(Ok(())) + } + + fn is_dirty(&self, _: &App) -> bool { + false + } + + fn has_conflict(&self, _: &App) -> bool { + false + } + + fn breadcrumbs( + &self, + _theme: &theme::Theme, + _cx: &App, + ) -> Option> { + None + } + + fn added_to_workspace( + &mut self, + _workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + window.focus(&self.focus_handle, cx); + } + + fn show_toolbar(&self) -> bool { + true + } + + fn pixel_position_of_cursor(&self, _: &App) -> Option> { + None + } + + fn set_nav_history( + &mut self, + _: workspace::ItemNavHistory, + _window: &mut Window, + _: &mut Context, + ) { + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.clone().into()) + } else { + None + } + } +} diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 85cfb3b499f5cc2baefdc23f8e0ffc91f09b620d..4e94a811510ee07707bf729040d41fc8b1eb922c 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -6,15 +6,22 @@ use crate::project_diff::{self, Diff, ProjectDiff}; use crate::remote_output::{self, RemoteAction, SuccessMessage}; use crate::{branch_picker, picker_prompt, render_remote_button}; use crate::{ - git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector, + file_history_view::FileHistoryView, git_panel_settings::GitPanelSettings, git_status_icon, + repository_selector::RepositorySelector, }; use agent_settings::AgentSettings; use anyhow::Context as _; use askpass::AskPassDelegate; +use cloud_llm_client::CompletionIntent; +use collections::{BTreeMap, HashMap, HashSet}; use db::kvp::KEY_VALUE_STORE; -use editor::{Editor, EditorElement, EditorMode, MultiBuffer}; +use editor::RewrapOptions; +use editor::{ + Direction, Editor, EditorElement, EditorMode, MultiBuffer, MultiBufferOffset, + actions::ExpandAllDiffHunks, +}; use futures::StreamExt as _; -use git::blame::ParsedCommitMessage; +use git::commit::ParsedCommitMessage; use git::repository::{ 
Branch, CommitDetails, CommitOptions, CommitSummary, DiffType, FetchOptions, GitCommitter, PushOptions, Remote, RemoteCommandOutput, ResetMode, Upstream, UpstreamTracking, @@ -24,21 +31,22 @@ use git::stash::GitStash; use git::status::StageStatus; use git::{Amend, Signoff, ToggleStaged, repository::RepoPath, status::FileStatus}; use git::{ - ExpandCommitEditor, RestoreTrackedFiles, StageAll, StashAll, StashApply, StashPop, - TrashUntrackedFiles, UnstageAll, + ExpandCommitEditor, GitHostingProviderRegistry, RestoreTrackedFiles, StageAll, StashAll, + StashApply, StashPop, TrashUntrackedFiles, UnstageAll, }; use gpui::{ - Action, AsyncApp, AsyncWindowContext, ClickEvent, Corner, DismissEvent, Entity, EventEmitter, - FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, - MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy, Subscription, Task, - UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list, + Action, AsyncApp, AsyncWindowContext, Bounds, ClickEvent, Corner, DismissEvent, Entity, + EventEmitter, FocusHandle, Focusable, KeyContext, MouseButton, MouseDownEvent, Point, + PromptLevel, ScrollStrategy, Subscription, Task, UniformListScrollHandle, WeakEntity, actions, + anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; use language::{Buffer, File}; use language_model::{ - ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, + Role, ZED_CLOUD_PROVIDER_ID, }; -use menu::{Confirm, SecondaryConfirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; +use menu; use multi_buffer::ExcerptInfo; use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{ @@ -48,30 +56,30 @@ use panel::{ use project::{ Fs, Project, ProjectPath, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op}, + project_settings::{GitPathStyle, ProjectSettings}, }; +use prompt_store::{PromptId, PromptStore, RULES_FILE_NAMES}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore, StatusStyle}; use std::future::Future; use std::ops::Range; use std::path::Path; -use std::{collections::HashSet, sync::Arc, time::Duration, usize}; +use std::{sync::Arc, time::Duration, usize}; use strum::{IntoEnumIterator, VariantNames}; use time::OffsetDateTime; use ui::{ - ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, PopoverMenu, ScrollAxes, - Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*, + ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IndentGuideColors, + PopoverMenu, RenderedIndentGuide, ScrollAxes, Scrollbars, SplitButton, Tooltip, WithScrollbar, + prelude::*, }; use util::paths::PathStyle; -use util::{ResultExt, TryFutureExt, maybe}; +use util::{ResultExt, TryFutureExt, maybe, rel_path::RelPath}; use workspace::SERIALIZATION_THROTTLE_TIME; - -use cloud_llm_client::CompletionIntent; use workspace::{ Workspace, dock::{DockPosition, Panel, PanelEvent}, - notifications::{DetachAndPromptErr, ErrorMessagePrompt, NotificationId}, + notifications::{DetachAndPromptErr, ErrorMessagePrompt, NotificationId, NotifyResultExt}, }; - actions!( git_panel, [ @@ -85,10 +93,24 @@ actions!( FocusEditor, /// Focuses on the changes list. 
FocusChanges, + /// Select next git panel menu item, and show it in the diff view + NextEntry, + /// Select previous git panel menu item, and show it in the diff view + PreviousEntry, + /// Select first git panel menu item, and show it in the diff view + FirstEntry, + /// Select last git panel menu item, and show it in the diff view + LastEntry, /// Toggles automatic co-author suggestions. ToggleFillCoAuthors, /// Toggles sorting entries by path vs status. ToggleSortByPath, + /// Toggles showing entries in tree vs flat view. + ToggleTreeView, + /// Expands the selected entry to show its children. + ExpandSelectedEntry, + /// Collapses the selected entry to hide its children. + CollapseSelectedEntry, ] ); @@ -119,6 +141,7 @@ struct GitMenuState { has_new_changes: bool, sort_by_path: bool, has_stash_items: bool, + tree_view: bool, } fn git_panel_context_menu( @@ -163,20 +186,33 @@ fn git_panel_context_menu( ) .separator() .entry( - if state.sort_by_path { - "Sort by Status" + if state.tree_view { + "Flat View" } else { - "Sort by Path" + "Tree View" }, - Some(Box::new(ToggleSortByPath)), - move |window, cx| window.dispatch_action(Box::new(ToggleSortByPath), cx), + Some(Box::new(ToggleTreeView)), + move |window, cx| window.dispatch_action(Box::new(ToggleTreeView), cx), ) + .when(!state.tree_view, |this| { + this.entry( + if state.sort_by_path { + "Sort by Status" + } else { + "Sort by Path" + }, + Some(Box::new(ToggleSortByPath)), + move |window, cx| window.dispatch_action(Box::new(ToggleSortByPath), cx), + ) + }) }) } const GIT_PANEL_KEY: &str = "GitPanel"; const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); +// TODO: We should revise this part. It seems the indentation width is not aligned with the one in project panel +const TREE_INDENT: f32 = 16.0; pub fn register(workspace: &mut Workspace) { workspace.register_action(|workspace, _: &ToggleFocus, window, cx| { @@ -201,7 +237,7 @@ struct SerializedGitPanel { signoff_enabled: bool, } -#[derive(Debug, PartialEq, Eq, Clone, Copy)] +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] enum Section { Conflict, Tracked, @@ -237,6 +273,8 @@ impl GitHeaderEntry { #[derive(Debug, PartialEq, Eq, Clone)] enum GitListEntry { Status(GitStatusEntry), + TreeStatus(GitTreeStatusEntry), + Directory(GitTreeDirEntry), Header(GitHeaderEntry), } @@ -244,11 +282,215 @@ impl GitListEntry { fn status_entry(&self) -> Option<&GitStatusEntry> { match self { GitListEntry::Status(entry) => Some(entry), + GitListEntry::TreeStatus(entry) => Some(&entry.entry), + _ => None, + } + } + + fn directory_entry(&self) -> Option<&GitTreeDirEntry> { + match self { + GitListEntry::Directory(entry) => Some(entry), _ => None, } } } +enum GitPanelViewMode { + Flat, + Tree(TreeViewState), +} + +impl GitPanelViewMode { + fn from_settings(cx: &App) -> Self { + if GitPanelSettings::get_global(cx).tree_view { + GitPanelViewMode::Tree(TreeViewState::default()) + } else { + GitPanelViewMode::Flat + } + } + + fn tree_state(&self) -> Option<&TreeViewState> { + match self { + GitPanelViewMode::Tree(state) => Some(state), + GitPanelViewMode::Flat => None, + } + } + + fn tree_state_mut(&mut self) -> Option<&mut TreeViewState> { + match self { + GitPanelViewMode::Tree(state) => Some(state), + GitPanelViewMode::Flat => None, + } + } +} + +#[derive(Default)] +struct TreeViewState { + // Maps visible index to actual entry index. + // Length equals the number of visible entries. + // This is needed because some entries (like collapsed directories) may be hidden. 
+ logical_indices: Vec, + expanded_dirs: HashMap, + directory_descendants: HashMap>, +} + +impl TreeViewState { + fn build_tree_entries( + &mut self, + section: Section, + mut entries: Vec, + seen_directories: &mut HashSet, + ) -> Vec<(GitListEntry, bool)> { + if entries.is_empty() { + return Vec::new(); + } + + entries.sort_by(|a, b| a.repo_path.cmp(&b.repo_path)); + + let mut root = TreeNode::default(); + for entry in entries { + let components: Vec<&str> = entry.repo_path.components().collect(); + if components.is_empty() { + root.files.push(entry); + continue; + } + + let mut current = &mut root; + let mut current_path = String::new(); + + for (ix, component) in components.iter().enumerate() { + if ix == components.len() - 1 { + current.files.push(entry.clone()); + } else { + if !current_path.is_empty() { + current_path.push('/'); + } + current_path.push_str(component); + let dir_path = RepoPath::new(¤t_path) + .expect("repo path from status entry component"); + + let component = SharedString::from(component.to_string()); + + current = current + .children + .entry(component.clone()) + .or_insert_with(|| TreeNode { + name: component, + path: Some(dir_path), + ..Default::default() + }); + } + } + } + + let (flattened, _) = self.flatten_tree(&root, section, 0, seen_directories); + flattened + } + + fn flatten_tree( + &mut self, + node: &TreeNode, + section: Section, + depth: usize, + seen_directories: &mut HashSet, + ) -> (Vec<(GitListEntry, bool)>, Vec) { + let mut all_statuses = Vec::new(); + let mut flattened = Vec::new(); + + for child in node.children.values() { + let (terminal, name) = Self::compact_directory_chain(child); + let Some(path) = terminal.path.clone().or_else(|| child.path.clone()) else { + continue; + }; + let (child_flattened, mut child_statuses) = + self.flatten_tree(terminal, section, depth + 1, seen_directories); + let key = TreeKey { section, path }; + let expanded = *self.expanded_dirs.get(&key).unwrap_or(&true); + self.expanded_dirs.entry(key.clone()).or_insert(true); + seen_directories.insert(key.clone()); + + self.directory_descendants + .insert(key.clone(), child_statuses.clone()); + + flattened.push(( + GitListEntry::Directory(GitTreeDirEntry { + key, + name, + depth, + expanded, + }), + true, + )); + + if expanded { + flattened.extend(child_flattened); + } else { + flattened.extend(child_flattened.into_iter().map(|(child, _)| (child, false))); + } + + all_statuses.append(&mut child_statuses); + } + + for file in &node.files { + all_statuses.push(file.clone()); + flattened.push(( + GitListEntry::TreeStatus(GitTreeStatusEntry { + entry: file.clone(), + depth, + }), + true, + )); + } + + (flattened, all_statuses) + } + + fn compact_directory_chain(mut node: &TreeNode) -> (&TreeNode, SharedString) { + let mut parts = vec![node.name.clone()]; + while node.files.is_empty() && node.children.len() == 1 { + let Some(child) = node.children.values().next() else { + continue; + }; + if child.path.is_none() { + break; + } + parts.push(child.name.clone()); + node = child; + } + let name = parts.join("/"); + (node, SharedString::from(name)) + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +struct GitTreeStatusEntry { + entry: GitStatusEntry, + depth: usize, +} + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +struct TreeKey { + section: Section, + path: RepoPath, +} + +#[derive(Debug, PartialEq, Eq, Clone)] +struct GitTreeDirEntry { + key: TreeKey, + name: SharedString, + depth: usize, + // staged_state: ToggleState, + expanded: bool, +} + +#[derive(Default)] +struct 
TreeNode { + name: SharedString, + path: Option, + children: BTreeMap, + files: Vec, +} + #[derive(Debug, PartialEq, Eq, Clone)] pub struct GitStatusEntry { pub(crate) repo_path: RepoPath, @@ -271,6 +513,69 @@ impl GitStatusEntry { } } +struct TruncatedPatch { + header: String, + hunks: Vec, + hunks_to_keep: usize, +} + +impl TruncatedPatch { + fn from_unified_diff(patch_str: &str) -> Option { + let lines: Vec<&str> = patch_str.lines().collect(); + if lines.len() < 2 { + return None; + } + let header = format!("{}\n{}\n", lines[0], lines[1]); + let mut hunks = Vec::new(); + let mut current_hunk = String::new(); + for line in &lines[2..] { + if line.starts_with("@@") { + if !current_hunk.is_empty() { + hunks.push(current_hunk); + } + current_hunk = format!("{}\n", line); + } else if !current_hunk.is_empty() { + current_hunk.push_str(line); + current_hunk.push('\n'); + } + } + if !current_hunk.is_empty() { + hunks.push(current_hunk); + } + if hunks.is_empty() { + return None; + } + let hunks_to_keep = hunks.len(); + Some(TruncatedPatch { + header, + hunks, + hunks_to_keep, + }) + } + fn calculate_size(&self) -> usize { + let mut size = self.header.len(); + for (i, hunk) in self.hunks.iter().enumerate() { + if i < self.hunks_to_keep { + size += hunk.len(); + } + } + size + } + fn to_string(&self) -> String { + let mut out = self.header.clone(); + for (i, hunk) in self.hunks.iter().enumerate() { + if i < self.hunks_to_keep { + out.push_str(hunk); + } + } + let skipped_hunks = self.hunks.len() - self.hunks_to_keep; + if skipped_hunks > 0 { + out.push_str(&format!("[...skipped {} hunks...]\n", skipped_hunks)); + } + out + } +} + pub struct GitPanel { pub(crate) active_repository: Option>, pub(crate) commit_editor: Entity, @@ -279,12 +584,15 @@ pub struct GitPanel { add_coauthors: bool, generate_commit_message_task: Option>>, entries: Vec, + view_mode: GitPanelViewMode, + entries_indices: HashMap, single_staged_entry: Option, single_tracked_entry: Option, focus_handle: FocusHandle, fs: Arc, new_count: usize, entry_count: usize, + changes_count: usize, new_staged_count: usize, pending_commit: Option>, amend_pending: bool, @@ -300,7 +608,7 @@ pub struct GitPanel { tracked_staged_count: usize, update_visible_entries_task: Task<()>, width: Option, - workspace: WeakEntity, + pub(crate) workspace: WeakEntity, context_menu: Option<(Entity, Point, Subscription)>, modal_open: bool, show_placeholders: bool, @@ -367,13 +675,19 @@ impl GitPanel { cx.on_focus(&focus_handle, window, Self::focus_in).detach(); let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; + let mut was_tree_view = GitPanelSettings::get_global(cx).tree_view; cx.observe_global_in::(window, move |this, window, cx| { - let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; - if is_sort_by_path != was_sort_by_path { - this.entries.clear(); + let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; + let tree_view = GitPanelSettings::get_global(cx).tree_view; + if tree_view != was_tree_view { + this.view_mode = GitPanelViewMode::from_settings(cx); + } + if sort_by_path != was_sort_by_path || tree_view != was_tree_view { + this.bulk_staging.take(); this.update_visible_entries(window, cx); } - was_sort_by_path = is_sort_by_path + was_sort_by_path = sort_by_path; + was_tree_view = tree_view; }) .detach(); @@ -409,17 +723,10 @@ impl GitPanel { } GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged { full_scan: true } + RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged | 
RepositoryEvent::MergeHeadsChanged, true, - ) => { - this.schedule_update(window, cx); - } - GitStoreEvent::RepositoryUpdated( - _, - RepositoryEvent::StatusesChanged { full_scan: false }, - true, ) | GitStoreEvent::RepositoryAdded | GitStoreEvent::RepositoryRemoved(_) => { @@ -446,10 +753,13 @@ impl GitPanel { add_coauthors: true, generate_commit_message_task: None, entries: Vec::new(), + view_mode: GitPanelViewMode::from_settings(cx), + entries_indices: HashMap::default(), focus_handle: cx.focus_handle(), fs, new_count: 0, new_staged_count: 0, + changes_count: 0, pending_commit: None, amend_pending: false, original_commit_message: None, @@ -483,70 +793,70 @@ impl GitPanel { }) } - pub fn entry_by_path(&self, path: &RepoPath, cx: &App) -> Option { - if GitPanelSettings::get_global(cx).sort_by_path { - return self - .entries - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) - .ok(); - } - - if self.conflicted_count > 0 { - let conflicted_start = 1; - if let Ok(ix) = self.entries[conflicted_start..conflicted_start + self.conflicted_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) - { - return Some(conflicted_start + ix); - } - } - if self.tracked_count > 0 { - let tracked_start = if self.conflicted_count > 0 { - 1 + self.conflicted_count - } else { - 0 - } + 1; - if let Ok(ix) = self.entries[tracked_start..tracked_start + self.tracked_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) - { - return Some(tracked_start + ix); - } - } - if self.new_count > 0 { - let untracked_start = if self.conflicted_count > 0 { - 1 + self.conflicted_count - } else { - 0 - } + if self.tracked_count > 0 { - 1 + self.tracked_count - } else { - 0 - } + 1; - if let Ok(ix) = self.entries[untracked_start..untracked_start + self.new_count] - .binary_search_by(|entry| entry.status_entry().unwrap().repo_path.cmp(path)) - { - return Some(untracked_start + ix); - } - } - None + pub fn entry_by_path(&self, path: &RepoPath) -> Option { + self.entries_indices.get(path).copied() } pub fn select_entry_by_path( &mut self, path: ProjectPath, - _: &mut Window, + window: &mut Window, cx: &mut Context, ) { let Some(git_repo) = self.active_repository.as_ref() else { return; }; - let Some(repo_path) = git_repo.read(cx).project_path_to_repo_path(&path, cx) else { - return; + + let (repo_path, section) = { + let repo = git_repo.read(cx); + let Some(repo_path) = repo.project_path_to_repo_path(&path, cx) else { + return; + }; + + let section = repo + .status_for_path(&repo_path) + .map(|status| status.status) + .map(|status| { + if repo.had_conflict_on_last_merge_head_change(&repo_path) { + Section::Conflict + } else if status.is_created() { + Section::New + } else { + Section::Tracked + } + }); + + (repo_path, section) }; - let Some(ix) = self.entry_by_path(&repo_path, cx) else { + + let mut needs_rebuild = false; + if let (Some(section), Some(tree_state)) = (section, self.view_mode.tree_state_mut()) { + let mut current_dir = repo_path.parent(); + while let Some(dir) = current_dir { + let key = TreeKey { + section, + path: RepoPath::from_rel_path(dir), + }; + + if tree_state.expanded_dirs.get(&key) == Some(&false) { + tree_state.expanded_dirs.insert(key, true); + needs_rebuild = true; + } + + current_dir = dir.parent(); + } + } + + if needs_rebuild { + self.update_visible_entries(window, cx); + } + + let Some(ix) = self.entry_by_path(&repo_path) else { return; }; + self.selected_entry = Some(ix); - cx.notify(); + 
self.scroll_to_selected_entry(cx); } fn serialization_key(workspace: &Workspace) -> Option { @@ -634,24 +944,98 @@ impl GitPanel { } fn scroll_to_selected_entry(&mut self, cx: &mut Context) { - if let Some(selected_entry) = self.selected_entry { + let Some(selected_entry) = self.selected_entry else { + cx.notify(); + return; + }; + + let visible_index = match &self.view_mode { + GitPanelViewMode::Flat => Some(selected_entry), + GitPanelViewMode::Tree(state) => state + .logical_indices + .iter() + .position(|&ix| ix == selected_entry), + }; + + if let Some(visible_index) = visible_index { self.scroll_handle - .scroll_to_item(selected_entry, ScrollStrategy::Center); + .scroll_to_item(visible_index, ScrollStrategy::Center); } cx.notify(); } - fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { - if !self.entries.is_empty() { - self.selected_entry = Some(1); + fn expand_selected_entry( + &mut self, + _: &ExpandSelectedEntry, + window: &mut Window, + cx: &mut Context, + ) { + let Some(entry) = self.get_selected_entry().cloned() else { + return; + }; + + if let GitListEntry::Directory(dir_entry) = entry { + if dir_entry.expanded { + self.select_next(&menu::SelectNext, window, cx); + } else { + self.toggle_directory(&dir_entry.key, window, cx); + } + } else { + self.select_next(&menu::SelectNext, window, cx); + } + } + + fn collapse_selected_entry( + &mut self, + _: &CollapseSelectedEntry, + window: &mut Window, + cx: &mut Context, + ) { + let Some(entry) = self.get_selected_entry().cloned() else { + return; + }; + + if let GitListEntry::Directory(dir_entry) = entry { + if dir_entry.expanded { + self.toggle_directory(&dir_entry.key, window, cx); + } else { + self.select_previous(&menu::SelectPrevious, window, cx); + } + } else { + self.select_previous(&menu::SelectPrevious, window, cx); + } + } + + fn select_first( + &mut self, + _: &menu::SelectFirst, + _window: &mut Window, + cx: &mut Context, + ) { + let first_entry = match &self.view_mode { + GitPanelViewMode::Flat => self + .entries + .iter() + .position(|entry| entry.status_entry().is_some()), + GitPanelViewMode::Tree(state) => { + let index = self.entries.iter().position(|entry| { + entry.status_entry().is_some() || entry.directory_entry().is_some() + }); + + index.map(|index| state.logical_indices[index]) + } + }; + + if let Some(first_entry) = first_entry { + self.selected_entry = Some(first_entry); self.scroll_to_selected_entry(cx); } } fn select_previous( &mut self, - _: &SelectPrevious, + _: &menu::SelectPrevious, _window: &mut Window, cx: &mut Context, ) { @@ -660,80 +1044,142 @@ impl GitPanel { return; } - if let Some(selected_entry) = self.selected_entry { - let new_selected_entry = if selected_entry > 0 { - selected_entry - 1 - } else { - selected_entry - }; + let Some(selected_entry) = self.selected_entry else { + return; + }; - if matches!( - self.entries.get(new_selected_entry), - Some(GitListEntry::Header(..)) - ) { - if new_selected_entry > 0 { - self.selected_entry = Some(new_selected_entry - 1) - } - } else { - self.selected_entry = Some(new_selected_entry); + let new_index = match &self.view_mode { + GitPanelViewMode::Flat => selected_entry.saturating_sub(1), + GitPanelViewMode::Tree(state) => { + let Some(current_logical_index) = state + .logical_indices + .iter() + .position(|&i| i == selected_entry) + else { + return; + }; + + state.logical_indices[current_logical_index.saturating_sub(1)] } + }; - self.scroll_to_selected_entry(cx); + if selected_entry == 0 && new_index == 0 { + return; 
} - cx.notify(); + if matches!( + self.entries.get(new_index.saturating_sub(1)), + Some(GitListEntry::Header(..)) + ) && new_index == 0 + { + return; + } + + if matches!(self.entries.get(new_index), Some(GitListEntry::Header(..))) { + self.selected_entry = Some(new_index.saturating_sub(1)); + } else { + self.selected_entry = Some(new_index); + } + + self.scroll_to_selected_entry(cx); } - fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context) { + fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context) { let item_count = self.entries.len(); if item_count == 0 { return; } - if let Some(selected_entry) = self.selected_entry { - let new_selected_entry = if selected_entry < item_count - 1 { - selected_entry + 1 - } else { - selected_entry - }; - if matches!( - self.entries.get(new_selected_entry), - Some(GitListEntry::Header(..)) - ) { - self.selected_entry = Some(new_selected_entry + 1); - } else { - self.selected_entry = Some(new_selected_entry); + let Some(selected_entry) = self.selected_entry else { + return; + }; + + if selected_entry == item_count - 1 { + return; + } + + let new_index = match &self.view_mode { + GitPanelViewMode::Flat => selected_entry.saturating_add(1), + GitPanelViewMode::Tree(state) => { + let Some(current_logical_index) = state + .logical_indices + .iter() + .position(|&i| i == selected_entry) + else { + return; + }; + + state.logical_indices[current_logical_index.saturating_add(1)] } + }; - self.scroll_to_selected_entry(cx); + if matches!(self.entries.get(new_index), Some(GitListEntry::Header(..))) { + self.selected_entry = Some(new_index.saturating_add(1)); + } else { + self.selected_entry = Some(new_index); } - cx.notify(); + self.scroll_to_selected_entry(cx); } - fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { if self.entries.last().is_some() { self.selected_entry = Some(self.entries.len() - 1); self.scroll_to_selected_entry(cx); } } + /// Show diff view at selected entry, only if the diff view is open + fn move_diff_to_entry(&mut self, window: &mut Window, cx: &mut Context) { + maybe!({ + let workspace = self.workspace.upgrade()?; + + if let Some(project_diff) = workspace.read(cx).item_of_type::(cx) { + let entry = self.entries.get(self.selected_entry?)?.status_entry()?; + + project_diff.update(cx, |project_diff, cx| { + project_diff.move_to_entry(entry.clone(), window, cx); + }); + } + + Some(()) + }); + } + + fn first_entry(&mut self, _: &FirstEntry, window: &mut Window, cx: &mut Context) { + self.select_first(&menu::SelectFirst, window, cx); + self.move_diff_to_entry(window, cx); + } + + fn last_entry(&mut self, _: &LastEntry, window: &mut Window, cx: &mut Context) { + self.select_last(&menu::SelectLast, window, cx); + self.move_diff_to_entry(window, cx); + } + + fn next_entry(&mut self, _: &NextEntry, window: &mut Window, cx: &mut Context) { + self.select_next(&menu::SelectNext, window, cx); + self.move_diff_to_entry(window, cx); + } + + fn previous_entry(&mut self, _: &PreviousEntry, window: &mut Window, cx: &mut Context) { + self.select_previous(&menu::SelectPrevious, window, cx); + self.move_diff_to_entry(window, cx); + } + fn focus_editor(&mut self, _: &FocusEditor, window: &mut Window, cx: &mut Context) { self.commit_editor.update(cx, |editor, cx| { - window.focus(&editor.focus_handle(cx)); + window.focus(&editor.focus_handle(cx), cx); }); cx.notify(); } - fn 
select_first_entry_if_none(&mut self, cx: &mut Context) { + fn select_first_entry_if_none(&mut self, window: &mut Window, cx: &mut Context) { let have_entries = self .active_repository .as_ref() .is_some_and(|active_repository| active_repository.read(cx).status_summary().count > 0); if have_entries && self.selected_entry.is_none() { - self.selected_entry = Some(1); - self.scroll_to_selected_entry(cx); - cx.notify(); + self.select_first(&menu::SelectFirst, window, cx); } } @@ -743,10 +1189,8 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) { - self.select_first_entry_if_none(cx); - - cx.focus_self(window); - cx.notify(); + self.focus_handle.focus(window, cx); + self.select_first_entry_if_none(window, cx); } fn get_selected_entry(&self) -> Option<&GitListEntry> { @@ -767,7 +1211,7 @@ impl GitPanel { .project_path_to_repo_path(&project_path, cx) .as_ref() { - project_diff.focus_handle(cx).focus(window); + project_diff.focus_handle(cx).focus(window, cx); project_diff.update(cx, |project_diff, cx| project_diff.autoscroll(cx)); return None; }; @@ -777,15 +1221,35 @@ impl GitPanel { ProjectDiff::deploy_at(workspace, Some(entry.clone()), window, cx); }) .ok(); - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); Some(()) }); } - fn open_file( - &mut self, - _: &menu::SecondaryConfirm, + fn file_history(&mut self, _: &git::FileHistory, window: &mut Window, cx: &mut Context) { + maybe!({ + let entry = self.entries.get(self.selected_entry?)?.status_entry()?; + let active_repo = self.active_repository.as_ref()?; + let repo_path = entry.repo_path.clone(); + let git_store = self.project.read(cx).git_store(); + + FileHistoryView::open( + repo_path, + git_store.downgrade(), + active_repo.downgrade(), + self.workspace.clone(), + window, + cx, + ); + + Some(()) + }); + } + + fn open_file( + &mut self, + _: &menu::SecondaryConfirm, window: &mut Window, cx: &mut Context, ) { @@ -799,15 +1263,46 @@ impl GitPanel { return None; } - self.workspace + let open_task = self + .workspace .update(cx, |workspace, cx| { - workspace - .open_path_preview(path, None, false, false, true, window, cx) - .detach_and_prompt_err("Failed to open file", window, cx, |e, _, _| { - Some(format!("{e}")) - }); + workspace.open_path_preview(path, None, false, false, true, window, cx) }) - .ok() + .ok()?; + + cx.spawn_in(window, async move |_, mut cx| { + let item = open_task + .await + .notify_async_err(&mut cx) + .ok_or_else(|| anyhow::anyhow!("Failed to open file"))?; + if let Some(active_editor) = item.downcast::() { + if let Some(diff_task) = + active_editor.update(cx, |editor, _cx| editor.wait_for_diff_to_load())? 
+                {
+                    diff_task.await;
+                }
+
+                cx.update(|window, cx| {
+                    active_editor.update(cx, |editor, cx| {
+                        editor.expand_all_diff_hunks(&ExpandAllDiffHunks, window, cx);
+
+                        let snapshot = editor.snapshot(window, cx);
+                        editor.go_to_hunk_before_or_after_position(
+                            &snapshot,
+                            language::Point::new(0, 0),
+                            Direction::Next,
+                            window,
+                            cx,
+                        );
+                    })
+                })?;
+            }
+
+            anyhow::Ok(())
+        })
+        .detach();
+
+        Some(())
         });
     }
@@ -829,14 +1324,14 @@ impl GitPanel {
         let prompt = window.prompt(
             PromptLevel::Warning,
             &format!(
-                "Are you sure you want to restore {}?",
+                "Are you sure you want to discard changes to {}?",
                 entry
                     .repo_path
                     .file_name()
                     .unwrap_or(entry.repo_path.display(path_style).as_ref()),
             ),
             None,
-            &["Restore", "Cancel"],
+            &["Discard Changes", "Cancel"],
             cx,
         );
         cx.background_spawn(prompt)
@@ -1207,6 +1702,71 @@ impl GitPanel {
             .detach();
     }
 
+    fn stage_status_for_entry(entry: &GitStatusEntry, repo: &Repository) -> StageStatus {
+        // Determining the current staged/unstaged status is a chained lookup:
+        // 1. First, check for any pending operation recorded in the repository.
+        // 2. If there are no pending ops, running or finished, ask the repository for the
+        //    most up-to-date file status read from disk. We do this because the `entry` arg
+        //    passed to this function (typically from `render_entry`) is likely to be stale,
+        //    which can otherwise cause artifacts such as the checkbox flickering between
+        //    checked and unchecked for a fraction of a second, which is undesirable.
+        // 3. Finally, if the repo has no info about this `entry`, fall back to whatever
+        //    status is encoded in the `entry` arg itself.
+        repo.pending_ops_for_path(&entry.repo_path)
+            .map(|ops| {
+                if ops.staging() || ops.staged() {
+                    StageStatus::Staged
+                } else {
+                    StageStatus::Unstaged
+                }
+            })
+            .or_else(|| {
+                repo.status_for_path(&entry.repo_path)
+                    .map(|status| status.status.staging())
+            })
+            .unwrap_or(entry.staging)
+    }
+
+    fn stage_status_for_directory(
+        &self,
+        entry: &GitTreeDirEntry,
+        repo: &Repository,
+    ) -> StageStatus {
+        let GitPanelViewMode::Tree(tree_state) = &self.view_mode else {
+            util::debug_panic!("We should never render a directory entry while in flat view mode");
+            return StageStatus::Unstaged;
+        };
+
+        let Some(descendants) = tree_state.directory_descendants.get(&entry.key) else {
+            return StageStatus::Unstaged;
+        };
+
+        let mut fully_staged_count = 0usize;
+        let mut any_staged_or_partially_staged = false;
+
+        for descendant in descendants {
+            match GitPanel::stage_status_for_entry(descendant, repo) {
+                StageStatus::Staged => {
+                    fully_staged_count += 1;
+                    any_staged_or_partially_staged = true;
+                }
+                StageStatus::PartiallyStaged => {
+                    any_staged_or_partially_staged = true;
+                }
+                StageStatus::Unstaged => {}
+            }
+        }
+
+        if descendants.is_empty() {
+            StageStatus::Unstaged
+        } else if fully_staged_count == descendants.len() {
+            StageStatus::Staged
+        } else if any_staged_or_partially_staged {
+            StageStatus::PartiallyStaged
+        } else {
+            StageStatus::Unstaged
+        }
+    }
+
     pub fn stage_all(&mut self, _: &StageAll, _window: &mut Window, cx: &mut Context<Self>) {
         self.change_all_files_stage(true, cx);
     }
@@ -1221,47 +1781,101 @@ impl GitPanel {
         _window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        let Some(active_repository) = self.active_repository.as_ref() else {
+        let Some(active_repository) = self.active_repository.clone() else {
             return;
         };
-        let (stage, repo_paths) = match entry {
-            GitListEntry::Status(status_entry) => {
-                let repo_paths = vec![status_entry.clone()];
-                let stage = if active_repository
-                    .read(cx)
-
.pending_ops_for_path(&status_entry.repo_path) - .map(|ops| ops.staging() || ops.staged()) - .unwrap_or(status_entry.status.staging().has_staged()) - { - if let Some(op) = self.bulk_staging.clone() - && op.anchor == status_entry.repo_path - { - self.bulk_staging = None; - } - false - } else { - self.set_bulk_staging_anchor(status_entry.repo_path.clone(), cx); - true - }; - (stage, repo_paths) - } - GitListEntry::Header(section) => { - let goal_staged_state = !self.header_state(section.header).selected(); - let repository = active_repository.read(cx); - let entries = self - .entries - .iter() - .filter_map(|entry| entry.status_entry()) - .filter(|status_entry| { - section.contains(status_entry, repository) - && status_entry.staging.as_bool() != Some(goal_staged_state) - }) - .cloned() - .collect::>(); + let mut set_anchor: Option = None; + let mut clear_anchor = None; + + let (stage, repo_paths) = { + let repo = active_repository.read(cx); + match entry { + GitListEntry::Status(status_entry) => { + let repo_paths = vec![status_entry.clone()]; + let stage = match GitPanel::stage_status_for_entry(status_entry, &repo) { + StageStatus::Staged => { + if let Some(op) = self.bulk_staging.clone() + && op.anchor == status_entry.repo_path + { + clear_anchor = Some(op.anchor); + } + false + } + StageStatus::Unstaged | StageStatus::PartiallyStaged => { + set_anchor = Some(status_entry.repo_path.clone()); + true + } + }; + (stage, repo_paths) + } + GitListEntry::TreeStatus(status_entry) => { + let repo_paths = vec![status_entry.entry.clone()]; + let stage = match GitPanel::stage_status_for_entry(&status_entry.entry, &repo) { + StageStatus::Staged => { + if let Some(op) = self.bulk_staging.clone() + && op.anchor == status_entry.entry.repo_path + { + clear_anchor = Some(op.anchor); + } + false + } + StageStatus::Unstaged | StageStatus::PartiallyStaged => { + set_anchor = Some(status_entry.entry.repo_path.clone()); + true + } + }; + (stage, repo_paths) + } + GitListEntry::Header(section) => { + let goal_staged_state = !self.header_state(section.header).selected(); + let entries = self + .entries + .iter() + .filter_map(|entry| entry.status_entry()) + .filter(|status_entry| { + section.contains(status_entry, &repo) + && GitPanel::stage_status_for_entry(status_entry, &repo).as_bool() + != Some(goal_staged_state) + }) + .cloned() + .collect::>(); - (goal_staged_state, entries) + (goal_staged_state, entries) + } + GitListEntry::Directory(entry) => { + let goal_staged_state = match self.stage_status_for_directory(entry, repo) { + StageStatus::Staged => StageStatus::Unstaged, + StageStatus::Unstaged | StageStatus::PartiallyStaged => StageStatus::Staged, + }; + let goal_stage = goal_staged_state == StageStatus::Staged; + + let entries = self + .view_mode + .tree_state() + .and_then(|state| state.directory_descendants.get(&entry.key)) + .cloned() + .unwrap_or_default() + .into_iter() + .filter(|status_entry| { + GitPanel::stage_status_for_entry(status_entry, &repo) + != goal_staged_state + }) + .collect::>(); + (goal_stage, entries) + } } }; + if let Some(anchor) = clear_anchor { + if let Some(op) = self.bulk_staging.clone() + && op.anchor == anchor + { + self.bulk_staging = None; + } + } + if let Some(anchor) = set_anchor { + self.set_bulk_staging_anchor(anchor, cx); + } + self.change_file_stage(stage, repo_paths, cx); } @@ -1435,16 +2049,26 @@ impl GitPanel { } } - fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context) { + fn on_commit(&mut self, _: &git::Commit, window: &mut Window, cx: 
&mut Context) { + if self.commit(&self.commit_editor.focus_handle(cx), window, cx) { + telemetry::event!("Git Committed", source = "Git Panel"); + } + } + + /// Commits staged changes with the current commit message. + /// + /// Returns `true` if the commit was executed, `false` otherwise. + pub(crate) fn commit( + &mut self, + commit_editor_focus_handle: &FocusHandle, + window: &mut Window, + cx: &mut Context, + ) -> bool { if self.amend_pending { - return; + return false; } - if self - .commit_editor - .focus_handle(cx) - .contains_focused(window, cx) - { - telemetry::event!("Git Committed", source = "Git Panel"); + + if commit_editor_focus_handle.contains_focused(window, cx) { self.commit_changes( CommitOptions { amend: false, @@ -1452,24 +2076,39 @@ impl GitPanel { }, window, cx, - ) + ); + true } else { cx.propagate(); + false } } - fn amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context) { - if self - .commit_editor - .focus_handle(cx) - .contains_focused(window, cx) - { + fn on_amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context) { + if self.amend(&self.commit_editor.focus_handle(cx), window, cx) { + telemetry::event!("Git Amended", source = "Git Panel"); + } + } + + /// Amends the most recent commit with staged changes and/or an updated commit message. + /// + /// Uses a two-stage workflow where the first invocation loads the commit + /// message for editing, second invocation performs the amend. Returns + /// `true` if the amend was executed, `false` otherwise. + pub(crate) fn amend( + &mut self, + commit_editor_focus_handle: &FocusHandle, + window: &mut Window, + cx: &mut Context, + ) -> bool { + if commit_editor_focus_handle.contains_focused(window, cx) { if self.head_commit(cx).is_some() { if !self.amend_pending { self.set_amend_pending(true, cx); - self.load_last_commit_message_if_empty(cx); + self.load_last_commit_message(cx); + + return false; } else { - telemetry::event!("Git Amended", source = "Git Panel"); self.commit_changes( CommitOptions { amend: true, @@ -1478,13 +2117,16 @@ impl GitPanel { window, cx, ); + + return true; } } + return false; } else { cx.propagate(); + return false; } } - pub fn head_commit(&self, cx: &App) -> Option { self.active_repository .as_ref() @@ -1492,13 +2134,11 @@ impl GitPanel { .cloned() } - pub fn load_last_commit_message_if_empty(&mut self, cx: &mut Context) { - if !self.commit_editor.read(cx).is_empty(cx) { - return; - } + pub fn load_last_commit_message(&mut self, cx: &mut Context) { let Some(head_commit) = self.head_commit(cx) else { return; }; + let recent_sha = head_commit.sha.to_string(); let detail_task = self.load_commit_details(recent_sha, cx); cx.spawn(async move |this, cx| { @@ -1521,7 +2161,10 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) -> Option { - let git_commit_language = self.commit_editor.read(cx).language_at(0, cx); + let git_commit_language = self + .commit_editor + .read(cx) + .language_at(MultiBufferOffset(0), cx); let message = self.commit_editor.read(cx).text(cx); if message.is_empty() { return self @@ -1538,7 +2181,13 @@ impl GitPanel { let editor = cx.new(|cx| Editor::for_buffer(buffer, None, window, cx)); let wrapped_message = editor.update(cx, |editor, cx| { editor.select_all(&Default::default(), window, cx); - editor.rewrap(&Default::default(), window, cx); + editor.rewrap_impl( + RewrapOptions { + override_language_settings: false, + preserve_existing_whitespace: true, + }, + cx, + ); editor.text(cx) }); if wrapped_message.trim().is_empty() { @@ -1589,7 
+2238,10 @@ impl GitPanel { let commit_message = self.custom_or_suggested_commit_message(window, cx); let Some(mut message) = commit_message else { - self.commit_editor.read(cx).focus_handle(cx).focus(window); + self.commit_editor + .read(cx) + .focus_handle(cx) + .focus(window, cx); return; }; @@ -1631,11 +2283,16 @@ impl GitPanel { let result = task.await; this.update_in(cx, |this, window, cx| { this.pending_commit.take(); + match result { Ok(()) => { - this.commit_editor - .update(cx, |editor, cx| editor.clear(window, cx)); - this.original_commit_message = None; + if options.amend { + this.set_amend_pending(false, cx); + } else { + this.commit_editor + .update(cx, |editor, cx| editor.clear(window, cx)); + this.original_commit_message = None; + } } Err(e) => this.show_error_toast("commit", e, cx), } @@ -1644,9 +2301,6 @@ impl GitPanel { }); self.pending_commit = Some(task); - if options.amend { - self.set_amend_pending(false, cx); - } } pub(crate) fn uncommit(&mut self, window: &mut Window, cx: &mut Context) { @@ -1781,6 +2435,171 @@ impl GitPanel { self.generate_commit_message(cx); } + fn split_patch(patch: &str) -> Vec { + let mut result = Vec::new(); + let mut current_patch = String::new(); + + for line in patch.lines() { + if line.starts_with("---") && !current_patch.is_empty() { + result.push(current_patch.trim_end_matches('\n').into()); + current_patch = String::new(); + } + current_patch.push_str(line); + current_patch.push('\n'); + } + + if !current_patch.is_empty() { + result.push(current_patch.trim_end_matches('\n').into()); + } + + result + } + fn truncate_iteratively(patch: &str, max_bytes: usize) -> String { + let mut current_size = patch.len(); + if current_size <= max_bytes { + return patch.to_string(); + } + let file_patches = Self::split_patch(patch); + let mut file_infos: Vec = file_patches + .iter() + .filter_map(|patch| TruncatedPatch::from_unified_diff(patch)) + .collect(); + + if file_infos.is_empty() { + return patch.to_string(); + } + + current_size = file_infos.iter().map(|f| f.calculate_size()).sum::(); + while current_size > max_bytes { + let file_idx = file_infos + .iter() + .enumerate() + .filter(|(_, f)| f.hunks_to_keep > 1) + .max_by_key(|(_, f)| f.hunks_to_keep) + .map(|(idx, _)| idx); + match file_idx { + Some(idx) => { + let file = &mut file_infos[idx]; + let size_before = file.calculate_size(); + file.hunks_to_keep -= 1; + let size_after = file.calculate_size(); + let saved = size_before.saturating_sub(size_after); + current_size = current_size.saturating_sub(saved); + } + None => { + break; + } + } + } + + file_infos + .iter() + .map(|info| info.to_string()) + .collect::>() + .join("\n") + } + + pub fn compress_commit_diff(diff_text: &str, max_bytes: usize) -> String { + if diff_text.len() <= max_bytes { + return diff_text.to_string(); + } + + let mut compressed = diff_text + .lines() + .map(|line| { + if line.len() > 256 { + format!("{}...[truncated]\n", &line[..line.floor_char_boundary(256)]) + } else { + format!("{}\n", line) + } + }) + .collect::>() + .join(""); + + if compressed.len() <= max_bytes { + return compressed; + } + + compressed = Self::truncate_iteratively(&compressed, max_bytes); + + compressed + } + + async fn load_project_rules( + project: &Entity, + repo_work_dir: &Arc, + cx: &mut AsyncApp, + ) -> Option { + let rules_path = cx + .update(|cx| { + for worktree in project.read(cx).worktrees(cx) { + let worktree_abs_path = worktree.read(cx).abs_path(); + if !worktree_abs_path.starts_with(&repo_work_dir) { + continue; + } + + let 
worktree_snapshot = worktree.read(cx).snapshot(); + for rules_name in RULES_FILE_NAMES { + if let Ok(rel_path) = RelPath::unix(rules_name) { + if let Some(entry) = worktree_snapshot.entry_for_path(rel_path) { + if entry.is_file() { + return Some(ProjectPath { + worktree_id: worktree.read(cx).id(), + path: entry.path.clone(), + }); + } + } + } + } + } + None + }) + .ok()??; + + let buffer = project + .update(cx, |project, cx| project.open_buffer(rules_path, cx)) + .ok()? + .await + .ok()?; + + let content = buffer + .read_with(cx, |buffer, _| buffer.text()) + .ok()? + .trim() + .to_string(); + + if content.is_empty() { + None + } else { + Some(content) + } + } + + async fn load_commit_message_prompt( + is_using_legacy_zed_pro: bool, + cx: &mut AsyncApp, + ) -> String { + const DEFAULT_PROMPT: &str = include_str!("commit_message_prompt.txt"); + + // Remove this once we stop supporting legacy Zed Pro + // In legacy Zed Pro, Git commit summary generation did not count as a + // prompt. If the user changes the prompt, our classification will fail, + // meaning that users will be charged for generating commit messages. + if is_using_legacy_zed_pro { + return DEFAULT_PROMPT.to_string(); + } + + let load = async { + let store = cx.update(|cx| PromptStore::global(cx)).ok()?.await.ok()?; + store + .update(cx, |s, cx| s.load(PromptId::CommitMessage, cx)) + .ok()? + .await + .ok() + }; + load.await.unwrap_or_else(|| DEFAULT_PROMPT.to_string()) + } + /// Generates a commit message using an LLM. pub fn generate_commit_message(&mut self, cx: &mut Context) { if !self.can_commit() || !AgentSettings::get_global(cx).enabled(cx) { @@ -1808,8 +2627,17 @@ impl GitPanel { }); let temperature = AgentSettings::temperature_for_model(&model, cx); + let project = self.project.clone(); + let repo_work_dir = repo.read(cx).work_directory_abs_path.clone(); + + // Remove this once we stop supporting legacy Zed Pro + let is_using_legacy_zed_pro = provider.id() == ZED_CLOUD_PROVIDER_ID + && self.workspace.upgrade().map_or(false, |workspace| { + workspace.read(cx).user_store().read(cx).plan() + == Some(cloud_llm_client::Plan::V1(cloud_llm_client::PlanV1::ZedPro)) + }); - self.generate_commit_message_task = Some(cx.spawn(async move |this, cx| { + self.generate_commit_message_task = Some(cx.spawn(async move |this, mut cx| { async move { let _defer = cx.on_drop(&this, |this, _cx| { this.generate_commit_message_task.take(); @@ -1839,10 +2667,12 @@ impl GitPanel { } }; - const ONE_MB: usize = 1_000_000; - if diff_text.len() > ONE_MB { - diff_text = diff_text.chars().take(ONE_MB).collect() - } + const MAX_DIFF_BYTES: usize = 20_000; + diff_text = Self::compress_commit_diff(&diff_text, MAX_DIFF_BYTES); + + let rules_content = Self::load_project_rules(&project, &repo_work_dir, &mut cx).await; + + let prompt = Self::load_commit_message_prompt(is_using_legacy_zed_pro, &mut cx).await; let subject = this.update(cx, |this, cx| { this.commit_editor.read(cx).text(cx).lines().next().map(ToOwned::to_owned).unwrap_or_default() @@ -1850,13 +2680,23 @@ impl GitPanel { let text_empty = subject.trim().is_empty(); - let content = if text_empty { - format!("{PROMPT}\nHere are the changes in this commit:\n{diff_text}") + let rules_section = match &rules_content { + Some(rules) => format!( + "\n\nThe user has provided the following project rules that you should follow when writing the commit message:\n\ + \n{rules}\n\n" + ), + None => String::new(), + }; + + let subject_section = if text_empty { + String::new() } else { - format!("{PROMPT}\nHere is 
the user's subject line:\n{subject}\nHere are the changes in this commit:\n{diff_text}\n") + format!("\nHere is the user's subject line:\n{subject}") }; - const PROMPT: &str = include_str!("commit_message_prompt.txt"); + let content = format!( + "{prompt}{rules_section}{subject_section}\nHere are the changes in this commit:\n{diff_text}" + ); let request = LanguageModelRequest { thread_id: None, @@ -1867,6 +2707,7 @@ impl GitPanel { role: Role::User, content: vec![content.into()], cache: false, + reasoning_details: None, }], tools: Vec::new(), tool_choice: None, @@ -1926,7 +2767,7 @@ impl GitPanel { cx.spawn_in(window, async move |_, cx| { let repo = repo?; let remotes = repo - .update(cx, |repo, _| repo.get_remotes(None)) + .update(cx, |repo, _| repo.get_remotes(None, false)) .ok()? .await .ok()? @@ -2190,7 +3031,7 @@ impl GitPanel { }; telemetry::event!("Git Pulled"); let branch = branch.clone(); - let remote = self.get_remote(false, window, cx); + let remote = self.get_remote(false, false, window, cx); cx.spawn_in(window, async move |this, cx| { let remote = match remote.await { Ok(Some(remote)) => remote, @@ -2266,7 +3107,7 @@ impl GitPanel { _ => None, } }; - let remote = self.get_remote(select_remote, window, cx); + let remote = self.get_remote(select_remote, true, window, cx); cx.spawn_in(window, async move |this, cx| { let remote = match remote.await { @@ -2343,6 +3184,7 @@ impl GitPanel { fn get_remote( &mut self, always_select: bool, + is_push: bool, window: &mut Window, cx: &mut Context, ) -> impl Future>> + use<> { @@ -2360,7 +3202,7 @@ impl GitPanel { let current_branch = repo.branch.as_ref().context("No active branch")?; Some(current_branch.name().to_string()) }; - anyhow::Ok(repo.get_remotes(current_branch)) + anyhow::Ok(repo.get_remotes(current_branch, is_push)) })?? 
.await??; @@ -2483,6 +3325,29 @@ impl GitPanel { } } + fn toggle_tree_view(&mut self, _: &ToggleTreeView, _: &mut Window, cx: &mut Context) { + let current_setting = GitPanelSettings::get_global(cx).tree_view; + if let Some(workspace) = self.workspace.upgrade() { + let workspace = workspace.read(cx); + let fs = workspace.app_state().fs.clone(); + cx.update_global::(|store, _cx| { + store.update_settings_file(fs, move |settings, _cx| { + settings.git_panel.get_or_insert_default().tree_view = Some(!current_setting); + }); + }) + } + } + + fn toggle_directory(&mut self, key: &TreeKey, window: &mut Window, cx: &mut Context) { + if let Some(state) = self.view_mode.tree_state_mut() { + let expanded = state.expanded_dirs.entry(key.clone()).or_insert(true); + *expanded = !*expanded; + self.update_visible_entries(window, cx); + } else { + util::debug_panic!("Attempted to toggle directory in flat Git Panel state"); + } + } + fn fill_co_authors(&mut self, message: &mut String, cx: &mut Context) { const CO_AUTHOR_PREFIX: &str = "Co-authored-by: "; @@ -2592,27 +3457,34 @@ impl GitPanel { let bulk_staging = self.bulk_staging.take(); let last_staged_path_prev_index = bulk_staging .as_ref() - .and_then(|op| self.entry_by_path(&op.anchor, cx)); + .and_then(|op| self.entry_by_path(&op.anchor)); self.entries.clear(); + self.entries_indices.clear(); self.single_staged_entry.take(); self.single_tracked_entry.take(); self.conflicted_count = 0; self.conflicted_staged_count = 0; + self.changes_count = 0; self.new_count = 0; self.tracked_count = 0; self.new_staged_count = 0; self.tracked_staged_count = 0; self.entry_count = 0; + self.max_width_item_index = None; let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; + let is_tree_view = matches!(self.view_mode, GitPanelViewMode::Tree(_)); + let group_by_status = is_tree_view || !sort_by_path; let mut changed_entries = Vec::new(); let mut new_entries = Vec::new(); let mut conflict_entries = Vec::new(); let mut single_staged_entry = None; let mut staged_count = 0; - let mut max_width_item: Option<(RepoPath, usize)> = None; + let mut seen_directories = HashSet::default(); + let mut max_width_estimate = 0usize; + let mut max_width_item_index = None; let Some(repo) = self.active_repository.as_ref() else { // Just clear entries if no repository is active. 
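The hunks that follow rebuild the panel's visible entries for the new tree view: status entries are grouped into a directory tree and single-child directory chains are collapsed into one row (see `TreeViewState::build_tree_entries` and `compact_directory_chain` earlier in this file). As a rough, standalone illustration of that grouping idea only — the `Node`, `insert`, and `compact` names and the sample paths below are hypothetical, not the PR's types — here is a minimal sketch:

```rust
// Illustrative sketch only (hypothetical `Node`/`insert`/`compact`, not the PR's types):
// group file paths into a directory tree, then collapse single-child directory chains
// so that `a/b/c/file.rs` renders under one `a/b/c` directory row.
use std::collections::BTreeMap;

#[derive(Default)]
struct Node {
    children: BTreeMap<String, Node>,
    files: Vec<String>,
}

fn insert(root: &mut Node, path: &str) {
    let mut parts: Vec<&str> = path.split('/').collect();
    let file = parts.pop().unwrap_or_default().to_string();
    let mut node = root;
    for dir in parts {
        node = node.children.entry(dir.to_string()).or_default();
    }
    node.files.push(file);
}

// Walk down while a directory has exactly one child directory and no files of its own,
// accumulating the compacted label (e.g. "src/git_ui").
fn compact<'a>(name: &str, mut node: &'a Node) -> (String, &'a Node) {
    let mut label = name.to_string();
    while node.files.is_empty() && node.children.len() == 1 {
        let (child_name, child) = node.children.iter().next().unwrap();
        label.push('/');
        label.push_str(child_name);
        node = child;
    }
    (label, node)
}

fn print_tree(node: &Node, depth: usize) {
    for (name, child) in &node.children {
        let (label, terminal) = compact(name, child);
        println!("{}{}/", "  ".repeat(depth), label);
        print_tree(terminal, depth + 1);
    }
    for file in &node.files {
        println!("{}{}", "  ".repeat(depth), file);
    }
}

fn main() {
    let mut root = Node::default();
    for path in [
        "src/git_ui/git_panel.rs",
        "src/git_ui/file_history_view.rs",
        "docs/README.md",
    ] {
        insert(&mut root, path);
    }
    // Prints:
    // docs/
    //   README.md
    // src/git_ui/
    //   git_panel.rs
    //   file_history_view.rs
    print_tree(&root, 0);
}
```

Backing the children with a `BTreeMap` keeps sibling directories in a stable sorted order without an extra sort pass, which mirrors the `BTreeMap`-backed `children` field on the PR's `TreeNode`.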
@@ -2625,6 +3497,7 @@ impl GitPanel { self.stash_entries = repo.cached_stash(); for entry in repo.cached_status() { + self.changes_count += 1; let is_conflict = repo.had_conflict_on_last_merge_head_change(&entry.repo_path); let is_new = entry.status.is_created(); let staging = entry.status.staging(); @@ -2649,26 +3522,9 @@ impl GitPanel { single_staged_entry = Some(entry.clone()); } - let width_estimate = Self::item_width_estimate( - entry.parent_dir(path_style).map(|s| s.len()).unwrap_or(0), - entry.display_name(path_style).len(), - ); - - match max_width_item.as_mut() { - Some((repo_path, estimate)) => { - if width_estimate > *estimate { - *repo_path = entry.repo_path.clone(); - *estimate = width_estimate; - } - } - None => max_width_item = Some((entry.repo_path.clone(), width_estimate)), - } - - if sort_by_path { - changed_entries.push(entry); - } else if is_conflict { + if group_by_status && is_conflict { conflict_entries.push(entry); - } else if is_new { + } else if group_by_status && is_new { new_entries.push(entry); } else { changed_entries.push(entry); @@ -2683,26 +3539,19 @@ impl GitPanel { if ops.staged() { self.single_staged_entry = single_staged_entry; } + } else { + self.single_staged_entry = single_staged_entry; } - } else if repo - .pending_ops_by_path - .summary() - .item_summary - .staging_count - == 1 + } else if repo.pending_ops_summary().item_summary.staging_count == 1 + && let Some(ops) = repo.pending_ops().find(|ops| ops.staging()) { - self.single_staged_entry = repo.pending_ops_by_path.iter().find_map(|ops| { - if ops.staging() { - repo.status_for_path(&ops.repo_path) - .map(|status| GitStatusEntry { - repo_path: ops.repo_path.clone(), - status: status.status, - staging: StageStatus::Staged, - }) - } else { - None - } - }); + self.single_staged_entry = + repo.status_for_path(&ops.repo_path) + .map(|status| GitStatusEntry { + repo_path: ops.repo_path.clone(), + status: status.status, + staging: StageStatus::Staged, + }); } } @@ -2710,57 +3559,122 @@ impl GitPanel { self.single_tracked_entry = changed_entries.first().cloned(); } - if !conflict_entries.is_empty() { - self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::Conflict, - })); - self.entries - .extend(conflict_entries.into_iter().map(GitListEntry::Status)); - } + let mut push_entry = + |this: &mut Self, + entry: GitListEntry, + is_visible: bool, + logical_indices: Option<&mut Vec>| { + if let Some(estimate) = + this.width_estimate_for_list_entry(is_tree_view, &entry, path_style) + { + if estimate > max_width_estimate { + max_width_estimate = estimate; + max_width_item_index = Some(this.entries.len()); + } + } - if !changed_entries.is_empty() { - if !sort_by_path { - self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::Tracked, - })); - } - self.entries - .extend(changed_entries.into_iter().map(GitListEntry::Status)); - } - if !new_entries.is_empty() { - self.entries.push(GitListEntry::Header(GitHeaderEntry { - header: Section::New, - })); - self.entries - .extend(new_entries.into_iter().map(GitListEntry::Status)); - } + if let Some(repo_path) = entry.status_entry().map(|status| status.repo_path.clone()) + { + this.entries_indices.insert(repo_path, this.entries.len()); + } - if let Some((repo_path, _)) = max_width_item { - self.max_width_item_index = self.entries.iter().position(|entry| match entry { - GitListEntry::Status(git_status_entry) => git_status_entry.repo_path == repo_path, - GitListEntry::Header(_) => false, - }); + if let (Some(indices), true) = 
(logical_indices, is_visible) { + indices.push(this.entries.len()); + } + + this.entries.push(entry); + }; + + macro_rules! take_section_entries { + () => { + [ + (Section::Conflict, std::mem::take(&mut conflict_entries)), + (Section::Tracked, std::mem::take(&mut changed_entries)), + (Section::New, std::mem::take(&mut new_entries)), + ] + }; } - self.update_counts(repo); + match &mut self.view_mode { + GitPanelViewMode::Tree(tree_state) => { + tree_state.logical_indices.clear(); + tree_state.directory_descendants.clear(); - let bulk_staging_anchor_new_index = bulk_staging + // This is just to get around the borrow checker + // because push_entry mutably borrows self + let mut tree_state = std::mem::take(tree_state); + + for (section, entries) in take_section_entries!() { + if entries.is_empty() { + continue; + } + + push_entry( + self, + GitListEntry::Header(GitHeaderEntry { header: section }), + true, + Some(&mut tree_state.logical_indices), + ); + + for (entry, is_visible) in + tree_state.build_tree_entries(section, entries, &mut seen_directories) + { + push_entry( + self, + entry, + is_visible, + Some(&mut tree_state.logical_indices), + ); + } + } + + tree_state + .expanded_dirs + .retain(|key, _| seen_directories.contains(key)); + self.view_mode = GitPanelViewMode::Tree(tree_state); + } + GitPanelViewMode::Flat => { + for (section, entries) in take_section_entries!() { + if entries.is_empty() { + continue; + } + + if section != Section::Tracked || !sort_by_path { + push_entry( + self, + GitListEntry::Header(GitHeaderEntry { header: section }), + true, + None, + ); + } + + for entry in entries { + push_entry(self, GitListEntry::Status(entry), true, None); + } + } + } + } + + self.max_width_item_index = max_width_item_index; + + self.update_counts(repo); + + let bulk_staging_anchor_new_index = bulk_staging .as_ref() .filter(|op| op.repo_id == repo.id) - .and_then(|op| self.entry_by_path(&op.anchor, cx)); + .and_then(|op| self.entry_by_path(&op.anchor)); if bulk_staging_anchor_new_index == last_staged_path_prev_index && let Some(index) = bulk_staging_anchor_new_index && let Some(entry) = self.entries.get(index) && let Some(entry) = entry.status_entry() - && repo - .pending_ops_for_path(&entry.repo_path) - .map(|ops| ops.staging() || ops.staged()) - .unwrap_or(entry.staging.has_staged()) + && GitPanel::stage_status_for_entry(entry, &repo) + .as_bool() + .unwrap_or(false) { self.bulk_staging = bulk_staging; } - self.select_first_entry_if_none(cx); + self.select_first_entry_if_none(window, cx); let suggested_commit_message = self.suggest_commit_message(cx); let placeholder_text = suggested_commit_message.unwrap_or("Enter commit message".into()); @@ -2796,15 +3710,13 @@ impl GitPanel { self.new_staged_count = 0; self.tracked_staged_count = 0; self.entry_count = 0; - for entry in &self.entries { - let Some(status_entry) = entry.status_entry() else { - continue; - }; + + for status_entry in self.entries.iter().filter_map(|entry| entry.status_entry()) { self.entry_count += 1; - let is_staging_or_staged = repo - .pending_ops_for_path(&status_entry.repo_path) - .map(|ops| ops.staging() || ops.staged()) - .unwrap_or(status_entry.staging.has_staged()); + let is_staging_or_staged = GitPanel::stage_status_for_entry(status_entry, repo) + .as_bool() + .unwrap_or(false); + if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) { self.conflicted_count += 1; if is_staging_or_staged { @@ -2845,35 +3757,10 @@ impl GitPanel { } fn show_error_toast(&self, action: impl Into, e: anyhow::Error, 
cx: &mut App) { - let action = action.into(); let Some(workspace) = self.workspace.upgrade() else { return; }; - - let message = e.to_string().trim().to_string(); - if message - .matches(git::repository::REMOTE_CANCELLED_BY_USER) - .next() - .is_some() - { // Hide the cancelled by user message - } else { - workspace.update(cx, |workspace, cx| { - let workspace_weak = cx.weak_entity(); - let toast = StatusToast::new(format!("git {} failed", action), cx, |this, _cx| { - this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) - .action("View Log", move |window, cx| { - let message = message.clone(); - let action = action.clone(); - workspace_weak - .update(cx, move |workspace, cx| { - Self::open_output(action, workspace, &message, window, cx) - }) - .ok(); - }) - }); - workspace.toggle_status_toast(toast, cx) - }); - } + show_error_toast(workspace, action, e, cx) } fn show_commit_message_error(weak_this: &WeakEntity, err: &E, cx: &mut AsyncApp) @@ -2918,7 +3805,7 @@ impl GitPanel { format!("stdout:\n{}\nstderr:\n{}", output.stdout, output.stderr); workspace_weak .update(cx, move |workspace, cx| { - Self::open_output(operation, workspace, &output, window, cx) + open_output(operation, workspace, &output, window, cx) }) .ok(); }), @@ -2926,35 +3813,12 @@ impl GitPanel { .icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)) .action(text, move |_, cx| cx.open_url(&link)), } + .dismiss_button(true) }); workspace.toggle_status_toast(status_toast, cx) }); } - fn open_output( - operation: impl Into, - workspace: &mut Workspace, - output: &str, - window: &mut Window, - cx: &mut Context, - ) { - let operation = operation.into(); - let buffer = cx.new(|cx| Buffer::local(output, cx)); - buffer.update(cx, |buffer, cx| { - buffer.set_capability(language::Capability::ReadOnly, cx); - }); - let editor = cx.new(|cx| { - let mut editor = Editor::for_buffer(buffer, None, window, cx); - editor.buffer().update(cx, |buffer, cx| { - buffer.set_title(format!("Output from git {operation}"), cx); - }); - editor.set_read_only(true); - editor - }); - - workspace.add_item_to_center(Box::new(editor), window, cx); - } - pub fn can_commit(&self) -> bool { (self.has_staged_changes() || self.has_tracked_changes()) && !self.has_unstaged_conflicts() } @@ -2967,10 +3831,48 @@ impl GitPanel { self.has_staged_changes() } - // eventually we'll need to take depth into account here - // if we add a tree view - fn item_width_estimate(path: usize, file_name: usize) -> usize { - path + file_name + fn status_width_estimate( + tree_view: bool, + entry: &GitStatusEntry, + path_style: PathStyle, + depth: usize, + ) -> usize { + if tree_view { + Self::item_width_estimate(0, entry.display_name(path_style).len(), depth) + } else { + Self::item_width_estimate( + entry.parent_dir(path_style).map(|s| s.len()).unwrap_or(0), + entry.display_name(path_style).len(), + 0, + ) + } + } + + fn width_estimate_for_list_entry( + &self, + tree_view: bool, + entry: &GitListEntry, + path_style: PathStyle, + ) -> Option { + match entry { + GitListEntry::Status(status) => Some(Self::status_width_estimate( + tree_view, status, path_style, 0, + )), + GitListEntry::TreeStatus(status) => Some(Self::status_width_estimate( + tree_view, + &status.entry, + path_style, + status.depth, + )), + GitListEntry::Directory(dir) => { + Some(Self::item_width_estimate(0, dir.name.len(), dir.depth)) + } + GitListEntry::Header(_) => None, + } + } + + fn item_width_estimate(path: usize, file_name: usize, depth: usize) -> usize { + path + file_name + depth * 2 } fn 
render_overflow_menu(&self, id: impl Into) -> impl IntoElement { @@ -2997,6 +3899,7 @@ impl GitPanel { has_new_changes, sort_by_path: GitPanelSettings::get_global(cx).sort_by_path, has_stash_items, + tree_view: GitPanelSettings::get_global(cx).tree_view, }, window, cx, @@ -3224,23 +4127,17 @@ impl GitPanel { ) -> Option { self.active_repository.as_ref()?; - let text; - let action; - let tooltip; - if self.total_staged_count() == self.entry_count && self.entry_count > 0 { - text = "Unstage All"; - action = git::UnstageAll.boxed_clone(); - tooltip = "git reset"; - } else { - text = "Stage All"; - action = git::StageAll.boxed_clone(); - tooltip = "git add --all ." - } + let (text, action, stage, tooltip) = + if self.total_staged_count() == self.entry_count && self.entry_count > 0 { + ("Unstage All", UnstageAll.boxed_clone(), false, "git reset") + } else { + ("Stage All", StageAll.boxed_clone(), true, "git add --all") + }; - let change_string = match self.entry_count { + let change_string = match self.changes_count { 0 => "No Changes".to_string(), 1 => "1 Change".to_string(), - _ => format!("{} Changes", self.entry_count), + count => format!("{} Changes", count), }; Some( @@ -3273,11 +4170,15 @@ impl GitPanel { &self.focus_handle, )) .disabled(self.entry_count == 0) - .on_click(move |_, _, cx| { - let action = action.boxed_clone(); - cx.defer(move |cx| { - cx.dispatch_action(action.as_ref()); - }) + .on_click({ + let git_panel = cx.weak_entity(); + move |_, _, cx| { + git_panel + .update(cx, |git_panel, cx| { + git_panel.change_all_files_stage(stage, cx); + }) + .ok(); + } }), ), ), @@ -3314,7 +4215,6 @@ impl GitPanel { ) -> Option { let active_repository = self.active_repository.clone()?; let panel_editor_style = panel_editor_style(true, window, cx); - let enable_coauthors = self.render_co_authors(cx); let editor_focus_handle = self.commit_editor.focus_handle(cx); @@ -3359,7 +4259,7 @@ impl GitPanel { .border_color(cx.theme().colors().border) .cursor_text() .on_click(cx.listener(move |this, _: &ClickEvent, window, cx| { - window.focus(&this.commit_editor.focus_handle(cx)); + window.focus(&this.commit_editor.focus_handle(cx), cx); })) .child( h_flex() @@ -3570,6 +4470,7 @@ impl GitPanel { repo.clone(), workspace.clone(), None, + None, window, cx, ); @@ -3658,16 +4559,21 @@ impl GitPanel { let repo = self.active_repository.as_ref()?.read(cx); let project_path = (file.worktree_id(cx), file.path().clone()).into(); let repo_path = repo.project_path_to_repo_path(&project_path, cx)?; - let ix = self.entry_by_path(&repo_path, cx)?; + let ix = self.entry_by_path(&repo_path)?; let entry = self.entries.get(ix)?; - let is_staging_or_staged = if let Some(status_entry) = entry.status_entry() { - repo.pending_ops_for_path(&repo_path) - .map(|ops| ops.staging() || ops.staged()) - .unwrap_or(status_entry.staging.has_staged()) - } else { - false - }; + let is_staging_or_staged = repo + .pending_ops_for_path(&repo_path) + .map(|ops| ops.staging() || ops.staged()) + .or_else(|| { + repo.status_for_path(&repo_path) + .and_then(|status| status.status.staging().as_bool()) + }) + .or_else(|| { + entry + .status_entry() + .and_then(|entry| entry.staging.as_bool()) + }); let checkbox = Checkbox::new("stage-file", is_staging_or_staged.into()) .disabled(!self.has_write_access(cx)) @@ -3704,7 +4610,10 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) -> impl IntoElement { - let entry_count = self.entries.len(); + let (is_tree_view, entry_count) = match &self.view_mode { + GitPanelViewMode::Tree(state) => (true, 
state.logical_indices.len()), + GitPanelViewMode::Flat => (false, self.entries.len()), + }; v_flex() .flex_1() @@ -3724,10 +4633,33 @@ impl GitPanel { cx.processor(move |this, range: Range, window, cx| { let mut items = Vec::with_capacity(range.end - range.start); - for ix in range { + for ix in range.into_iter().map(|ix| match &this.view_mode { + GitPanelViewMode::Tree(state) => state.logical_indices[ix], + GitPanelViewMode::Flat => ix, + }) { match &this.entries.get(ix) { Some(GitListEntry::Status(entry)) => { - items.push(this.render_entry( + items.push(this.render_status_entry( + ix, + entry, + 0, + has_write_access, + window, + cx, + )); + } + Some(GitListEntry::TreeStatus(entry)) => { + items.push(this.render_status_entry( + ix, + &entry.entry, + entry.depth, + has_write_access, + window, + cx, + )); + } + Some(GitListEntry::Directory(entry)) => { + items.push(this.render_directory_entry( ix, entry, has_write_access, @@ -3751,14 +4683,58 @@ impl GitPanel { items }), ) + .when(is_tree_view, |list| { + let indent_size = px(TREE_INDENT); + list.with_decoration( + ui::indent_guides(indent_size, IndentGuideColors::panel(cx)) + .with_compute_indents_fn( + cx.entity(), + |this, range, _window, _cx| { + range + .map(|ix| match this.entries.get(ix) { + Some(GitListEntry::Directory(dir)) => dir.depth, + Some(GitListEntry::TreeStatus(status)) => { + status.depth + } + _ => 0, + }) + .collect() + }, + ) + .with_render_fn(cx.entity(), |_, params, _, _| { + // Magic number to align the tree item is 3 here + // because we're using 12px as the left-side padding + // and 3 makes the alignment work with the bounding box of the icon + let left_offset = px(TREE_INDENT + 3_f32); + let indent_size = params.indent_size; + let item_height = params.item_height; + + params + .indent_guides + .into_iter() + .map(|layout| { + let bounds = Bounds::new( + point( + layout.offset.x * indent_size + left_offset, + layout.offset.y * item_height, + ), + size(px(1.), layout.length * item_height), + ); + RenderedIndentGuide { + bounds, + layout, + is_active: false, + hitbox: None, + } + }) + .collect() + }), + ) + }) .size_full() .flex_grow() - .with_sizing_behavior(ListSizingBehavior::Auto) - .with_horizontal_sizing_behavior( - ListHorizontalSizingBehavior::Unconstrained, - ) .with_width_from_item(self.max_width_item_index) - .track_scroll(self.scroll_handle.clone()), + .track_scroll(&self.scroll_handle), ) .on_mouse_down( MouseButton::Right, @@ -3768,7 +4744,7 @@ impl GitPanel { ) .custom_scrollbars( Scrollbars::for_settings::() - .tracked_scroll_handle(self.scroll_handle.clone()) + .tracked_scroll_handle(&self.scroll_handle) .with_track_along( ScrollAxes::Horizontal, cx.theme().colors().panel_background, @@ -3780,7 +4756,7 @@ impl GitPanel { } fn entry_label(&self, label: impl Into, color: Color) -> Label { - Label::new(label.into()).color(color).single_line() + Label::new(label.into()).color(color) } fn list_item_height(&self) -> Rems { @@ -3802,8 +4778,8 @@ impl GitPanel { .h(self.list_item_height()) .w_full() .items_end() - .px(rems(0.75)) // ~12px - .pb(rems(0.3125)) // ~ 5px + .px_3() + .pb_1() .child( Label::new(header.title()) .color(Color::Muted) @@ -3846,23 +4822,24 @@ impl GitPanel { let restore_title = if entry.status.is_created() { "Trash File" } else { - "Restore File" + "Discard Changes" }; let context_menu = ContextMenu::build(window, cx, |context_menu, _, _| { - let mut context_menu = context_menu + let is_created = entry.status.is_created(); + context_menu .context(self.focus_handle.clone()) 
.action(stage_title, ToggleStaged.boxed_clone()) - .action(restore_title, git::RestoreFile::default().boxed_clone()); - - if entry.status.is_created() { - context_menu = - context_menu.action("Add to .gitignore", git::AddToGitignore.boxed_clone()); - } - - context_menu + .action(restore_title, git::RestoreFile::default().boxed_clone()) + .action_disabled_when( + !is_created, + "Add to .gitignore", + git::AddToGitignore.boxed_clone(), + ) .separator() - .action("Open Diff", Confirm.boxed_clone()) - .action("Open File", SecondaryConfirm.boxed_clone()) + .action("Open Diff", menu::Confirm.boxed_clone()) + .action("Open File", menu::SecondaryConfirm.boxed_clone()) + .separator() + .action_disabled_when(is_created, "View File History", Box::new(git::FileHistory)) }); self.selected_entry = Some(ix); self.set_context_menu(context_menu, position, window, cx); @@ -3883,6 +4860,7 @@ impl GitPanel { has_new_changes: self.new_count > 0, sort_by_path: GitPanelSettings::get_global(cx).sort_by_path, has_stash_items: self.stash_entries.entries.len() > 0, + tree_view: GitPanelSettings::get_global(cx).tree_view, }, window, cx, @@ -3914,15 +4892,18 @@ impl GitPanel { cx.notify(); } - fn render_entry( + fn render_status_entry( &self, ix: usize, entry: &GitStatusEntry, + depth: usize, has_write_access: bool, window: &Window, cx: &Context, ) -> AnyElement { + let tree_view = GitPanelSettings::get_global(cx).tree_view; let path_style = self.project.read(cx).path_style(cx); + let git_path_style = ProjectSettings::get_global(cx).git.path_style; let display_name = entry.display_name(path_style); let selected = self.selected_entry == Some(ix); @@ -3933,10 +4914,13 @@ impl GitPanel { let has_conflict = status.is_conflicted(); let is_modified = status.is_modified(); let is_deleted = status.is_deleted(); + let is_created = status.is_created(); let label_color = if status_style == StatusStyle::LabelColor { if has_conflict { Color::VersionControlConflict + } else if is_created { + Color::VersionControlAdded } else if is_modified { Color::VersionControlModified } else if is_deleted { @@ -3967,23 +4951,12 @@ impl GitPanel { .active_repository(cx) .expect("active repository must be set"); let repo = active_repo.read(cx); - // Checking for current staged/unstaged file status is a chained operation: - // 1. first, we check for any pending operation recorded in repository - // 2. if there are no pending ops either running or finished, we then ask the repository - // for the most up-to-date file status read from disk - we do this since `entry` arg to this function `render_entry` - // is likely to be staled, and may lead to weird artifacts in the form of subsecond auto-uncheck/check on - // the checkbox's state (or flickering) which is undesirable. - // 3. finally, if there is no info about this `entry` in the repo, we fall back to whatever status is encoded - // in `entry` arg. 
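// Illustrative aside, not part of the patch: a minimal sketch of the three-step
// fallback described in the removed comment above, modeled on the removed code
// that follows and on the equivalent `or_else` chain this diff adds for the
// file-tab checkbox. The new `GitPanel::stage_status_for_entry` helper is assumed
// to centralize the same resolution order; the free-function name and signature
// here are hypothetical and rely on the surrounding module's `Repository` and
// `GitStatusEntry` types.
fn staged_state_sketch(repo: &Repository, entry: &GitStatusEntry) -> bool {
    // 1. Prefer any pending stage/unstage operation already recorded on the repository.
    repo.pending_ops_for_path(&entry.repo_path)
        .map(|ops| ops.staging() || ops.staged())
        // 2. Otherwise ask the repository for the freshest status read from disk,
        //    since the `entry` snapshot may be stale and make the checkbox flicker.
        .or_else(|| {
            repo.status_for_path(&entry.repo_path)
                .map(|status| status.status.staging().has_staged())
        })
        // 3. Finally fall back to whatever state the (possibly stale) entry encodes.
        .unwrap_or(entry.staging.has_staged())
}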
- let is_staging_or_staged = repo - .pending_ops_for_path(&entry.repo_path) - .map(|ops| ops.staging() || ops.staged()) - .or_else(|| { - repo.status_for_path(&entry.repo_path) - .map(|status| status.status.staging().has_staged()) - }) - .unwrap_or(entry.staging.has_staged()); - let mut is_staged: ToggleState = is_staging_or_staged.into(); + let stage_status = GitPanel::stage_status_for_entry(entry, &repo); + let mut is_staged: ToggleState = match stage_status { + StageStatus::Staged => ToggleState::Selected, + StageStatus::Unstaged => ToggleState::Unselected, + StageStatus::PartiallyStaged => ToggleState::Indeterminate, + }; if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() { is_staged = ToggleState::Selected; } @@ -3994,51 +4967,117 @@ impl GitPanel { let marked_bg_alpha = 0.12; let state_opacity_step = 0.04; + let info_color = cx.theme().status().info; + let base_bg = match (selected, marked) { - (true, true) => cx - .theme() - .status() - .info - .alpha(selected_bg_alpha + marked_bg_alpha), - (true, false) => cx.theme().status().info.alpha(selected_bg_alpha), - (false, true) => cx.theme().status().info.alpha(marked_bg_alpha), + (true, true) => info_color.alpha(selected_bg_alpha + marked_bg_alpha), + (true, false) => info_color.alpha(selected_bg_alpha), + (false, true) => info_color.alpha(marked_bg_alpha), _ => cx.theme().colors().ghost_element_background, }; - let hover_bg = if selected { - cx.theme() - .status() - .info - .alpha(selected_bg_alpha + state_opacity_step) + let (hover_bg, active_bg) = if selected { + ( + info_color.alpha(selected_bg_alpha + state_opacity_step), + info_color.alpha(selected_bg_alpha + state_opacity_step * 2.0), + ) } else { - cx.theme().colors().ghost_element_hover + ( + cx.theme().colors().ghost_element_hover, + cx.theme().colors().ghost_element_active, + ) }; - let active_bg = if selected { - cx.theme() - .status() - .info - .alpha(selected_bg_alpha + state_opacity_step * 2.0) - } else { - cx.theme().colors().ghost_element_active - }; + let name_row = h_flex() + .min_w_0() + .flex_1() + .gap_1() + .child(git_status_icon(status)) + .map(|this| { + if tree_view { + this.pl(px(depth as f32 * TREE_INDENT)).child( + self.entry_label(display_name, label_color) + .when(status.is_deleted(), Label::strikethrough) + .truncate(), + ) + } else { + this.child(self.path_formatted( + entry.parent_dir(path_style), + path_color, + display_name, + label_color, + path_style, + git_path_style, + status.is_deleted(), + )) + } + }); h_flex() .id(id) .h(self.list_item_height()) .w_full() - .items_center() + .pl_3() + .pr_1() + .gap_1p5() .border_1() + .border_r_2() .when(selected && self.focus_handle.is_focused(window), |el| { - el.border_color(cx.theme().colors().border_focused) + el.border_color(cx.theme().colors().panel_focused_border) }) - .px(rems(0.75)) // ~12px - .overflow_hidden() - .flex_none() - .gap_1p5() .bg(base_bg) - .hover(|this| this.bg(hover_bg)) - .active(|this| this.bg(active_bg)) + .hover(|s| s.bg(hover_bg)) + .active(|s| s.bg(active_bg)) + .child(name_row) + .child( + div() + .id(checkbox_wrapper_id) + .flex_none() + .occlude() + .cursor_pointer() + .child( + Checkbox::new(checkbox_id, is_staged) + .disabled(!has_write_access) + .fill() + .elevation(ElevationIndex::Surface) + .on_click_ext({ + let entry = entry.clone(); + let this = cx.weak_entity(); + move |_, click, window, cx| { + this.update(cx, |this, cx| { + if !has_write_access { + return; + } + if click.modifiers().shift { + this.stage_bulk(ix, cx); + } else { + 
let list_entry = + if GitPanelSettings::get_global(cx).tree_view { + GitListEntry::TreeStatus(GitTreeStatusEntry { + entry: entry.clone(), + depth, + }) + } else { + GitListEntry::Status(entry.clone()) + }; + this.toggle_staged_for_entry(&list_entry, window, cx); + } + cx.stop_propagation(); + }) + .ok(); + } + }) + .tooltip(move |_window, cx| { + let action = match stage_status { + StageStatus::Staged => "Unstage", + StageStatus::Unstaged | StageStatus::PartiallyStaged => "Stage", + }; + let tooltip_name = action.to_string(); + + Tooltip::for_action(tooltip_name, &ToggleStaged, cx) + }), + ), + ) .on_click({ cx.listener(move |this, event: &ClickEvent, window, cx| { this.selected_entry = Some(ix); @@ -4047,7 +5086,7 @@ impl GitPanel { this.open_file(&Default::default(), window, cx) } else { this.open_diff(&Default::default(), window, cx); - this.focus_handle.focus(window); + this.focus_handle.focus(window, cx); } }) }) @@ -4068,6 +5107,97 @@ impl GitPanel { cx.stop_propagation(); }, ) + .into_any_element() + } + + fn render_directory_entry( + &self, + ix: usize, + entry: &GitTreeDirEntry, + has_write_access: bool, + window: &Window, + cx: &Context, + ) -> AnyElement { + // TODO: Have not yet plugin the self.marked_entries. Not sure when and why we need that + let selected = self.selected_entry == Some(ix); + let label_color = Color::Muted; + + let id: ElementId = ElementId::Name(format!("dir_{}_{}", entry.name, ix).into()); + let checkbox_id: ElementId = + ElementId::Name(format!("dir_checkbox_{}_{}", entry.name, ix).into()); + let checkbox_wrapper_id: ElementId = + ElementId::Name(format!("dir_checkbox_wrapper_{}_{}", entry.name, ix).into()); + + let selected_bg_alpha = 0.08; + let state_opacity_step = 0.04; + + let info_color = cx.theme().status().info; + let colors = cx.theme().colors(); + + let (base_bg, hover_bg, active_bg) = if selected { + ( + info_color.alpha(selected_bg_alpha), + info_color.alpha(selected_bg_alpha + state_opacity_step), + info_color.alpha(selected_bg_alpha + state_opacity_step * 2.0), + ) + } else { + ( + colors.ghost_element_background, + colors.ghost_element_hover, + colors.ghost_element_active, + ) + }; + + let folder_icon = if entry.expanded { + IconName::FolderOpen + } else { + IconName::Folder + }; + + let stage_status = if let Some(repo) = &self.active_repository { + self.stage_status_for_directory(entry, repo.read(cx)) + } else { + util::debug_panic!( + "Won't have entries to render without an active repository in Git Panel" + ); + StageStatus::PartiallyStaged + }; + + let toggle_state: ToggleState = match stage_status { + StageStatus::Staged => ToggleState::Selected, + StageStatus::Unstaged => ToggleState::Unselected, + StageStatus::PartiallyStaged => ToggleState::Indeterminate, + }; + + let name_row = h_flex() + .min_w_0() + .gap_1() + .pl(px(entry.depth as f32 * TREE_INDENT)) + .child( + Icon::new(folder_icon) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(self.entry_label(entry.name.clone(), label_color).truncate()); + + h_flex() + .id(id) + .h(self.list_item_height()) + .min_w_0() + .w_full() + .pl_3() + .pr_1() + .gap_1p5() + .justify_between() + .border_1() + .border_r_2() + .when(selected && self.focus_handle.is_focused(window), |el| { + el.border_color(cx.theme().colors().panel_focused_border) + }) + .bg(base_bg) + .hover(|s| s.bg(hover_bg)) + .active(|s| s.bg(active_bg)) + .child(name_row) .child( div() .id(checkbox_wrapper_id) @@ -4075,69 +5205,85 @@ impl GitPanel { .occlude() .cursor_pointer() .child( - Checkbox::new(checkbox_id, 
is_staged) + Checkbox::new(checkbox_id, toggle_state) .disabled(!has_write_access) .fill() .elevation(ElevationIndex::Surface) - .on_click_ext({ + .on_click({ let entry = entry.clone(); let this = cx.weak_entity(); - move |_, click, window, cx| { + move |_, window, cx| { this.update(cx, |this, cx| { if !has_write_access { return; } - if click.modifiers().shift { - this.stage_bulk(ix, cx); - } else { - this.toggle_staged_for_entry( - &GitListEntry::Status(entry.clone()), - window, - cx, - ); - } + this.toggle_staged_for_entry( + &GitListEntry::Directory(entry.clone()), + window, + cx, + ); cx.stop_propagation(); }) .ok(); } }) .tooltip(move |_window, cx| { - let action = if is_staging_or_staged { - "Unstage" - } else { - "Stage" + let action = match stage_status { + StageStatus::Staged => "Unstage", + StageStatus::Unstaged | StageStatus::PartiallyStaged => "Stage", }; - let tooltip_name = action.to_string(); - - Tooltip::for_action(tooltip_name, &ToggleStaged, cx) + Tooltip::simple(format!("{action} folder"), cx) }), ), ) - .child(git_status_icon(status)) + .on_click({ + let key = entry.key.clone(); + cx.listener(move |this, _event: &ClickEvent, window, cx| { + this.selected_entry = Some(ix); + this.toggle_directory(&key, window, cx); + }) + }) + .into_any_element() + } + + fn path_formatted( + &self, + directory: Option, + path_color: Color, + file_name: String, + label_color: Color, + path_style: PathStyle, + git_path_style: GitPathStyle, + strikethrough: bool, + ) -> Div { + let file_name_first = git_path_style == GitPathStyle::FileNameFirst; + let file_path_first = git_path_style == GitPathStyle::FilePathFirst; + + let file_name = format!("{} ", file_name); + + h_flex() + .min_w_0() + .overflow_hidden() + .when(file_path_first, |this| this.flex_row_reverse()) .child( - h_flex() - .items_center() - .flex_1() - // .overflow_hidden() - .when_some(entry.parent_dir(path_style), |this, parent| { - if !parent.is_empty() { - this.child( - self.entry_label( - format!("{parent}{}", path_style.separator()), - path_color, - ) - .when(status.is_deleted(), |this| this.strikethrough()), - ) - } else { - this - } - }) - .child( - self.entry_label(display_name, label_color) - .when(status.is_deleted(), |this| this.strikethrough()), - ), + div().flex_none().child( + self.entry_label(file_name, label_color) + .when(strikethrough, Label::strikethrough), + ), ) - .into_any_element() + .when_some(directory, |this, dir| { + let path_name = if file_name_first { + dir + } else { + format!("{dir}{}", path_style.primary_separator()) + }; + + this.child( + self.entry_label(path_name, path_color) + .truncate() + .when(strikethrough, Label::strikethrough), + ) + }) } fn has_write_access(&self, cx: &App) -> bool { @@ -4148,6 +5294,9 @@ impl GitPanel { self.amend_pending } + /// Sets the pending amend state, ensuring that the original commit message + /// is either saved, when `value` is `true` and there's no pending amend, or + /// restored, when `value` is `false` and there's a pending amend. 
pub fn set_amend_pending(&mut self, value: bool, cx: &mut Context) { if value && !self.amend_pending { let current_message = self.commit_message_buffer(cx).read(cx).text(); @@ -4231,7 +5380,7 @@ impl GitPanel { let Some(op) = self.bulk_staging.as_ref() else { return; }; - let Some(mut anchor_index) = self.entry_by_path(&op.anchor, cx) else { + let Some(mut anchor_index) = self.entry_by_path(&op.anchor) else { return; }; if let Some(entry) = self.entries.get(index) @@ -4265,7 +5414,7 @@ impl GitPanel { pub(crate) fn toggle_amend_pending(&mut self, cx: &mut Context) { self.set_amend_pending(!self.amend_pending, cx); if self.amend_pending { - self.load_last_commit_message_if_empty(cx); + self.load_last_commit_message(cx); } } } @@ -4296,8 +5445,8 @@ impl Render for GitPanel { .when(has_write_access && !project.is_read_only(cx), |this| { this.on_action(cx.listener(Self::toggle_staged_for_selected)) .on_action(cx.listener(Self::stage_range)) - .on_action(cx.listener(GitPanel::commit)) - .on_action(cx.listener(GitPanel::amend)) + .on_action(cx.listener(GitPanel::on_commit)) + .on_action(cx.listener(GitPanel::on_amend)) .on_action(cx.listener(GitPanel::toggle_signoff_enabled)) .on_action(cx.listener(Self::stage_all)) .on_action(cx.listener(Self::unstage_all)) @@ -4311,13 +5460,20 @@ impl Render for GitPanel { .on_action(cx.listener(Self::stash_all)) .on_action(cx.listener(Self::stash_pop)) }) + .on_action(cx.listener(Self::collapse_selected_entry)) + .on_action(cx.listener(Self::expand_selected_entry)) .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_previous)) .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::first_entry)) + .on_action(cx.listener(Self::next_entry)) + .on_action(cx.listener(Self::previous_entry)) + .on_action(cx.listener(Self::last_entry)) .on_action(cx.listener(Self::close_panel)) .on_action(cx.listener(Self::open_diff)) .on_action(cx.listener(Self::open_file)) + .on_action(cx.listener(Self::file_history)) .on_action(cx.listener(Self::focus_changes_list)) .on_action(cx.listener(Self::focus_editor)) .on_action(cx.listener(Self::expand_commit_editor)) @@ -4325,6 +5481,7 @@ impl Render for GitPanel { git_panel.on_action(cx.listener(Self::toggle_fill_co_authors)) }) .on_action(cx.listener(Self::toggle_sort_by_path)) + .on_action(cx.listener(Self::toggle_tree_view)) .size_full() .overflow_hidden() .bg(cx.theme().colors().panel_background) @@ -4463,6 +5620,7 @@ impl GitPanelMessageTooltip { window: &mut Window, cx: &mut App, ) -> Entity { + let remote_url = repository.read(cx).default_remote_url(); cx.new(|cx| { cx.spawn_in(window, async move |this, cx| { let (details, workspace) = git_panel.update(cx, |git_panel, cx| { @@ -4472,16 +5630,21 @@ impl GitPanelMessageTooltip { ) })?; let details = details.await?; + let provider_registry = cx + .update(|_, app| GitHostingProviderRegistry::default_global(app)) + .ok(); let commit_details = crate::commit_tooltip::CommitDetails { sha: details.sha.clone(), author_name: details.author_name.clone(), author_email: details.author_email.clone(), commit_time: OffsetDateTime::from_unix_timestamp(details.commit_timestamp)?, - message: Some(ParsedCommitMessage { - message: details.message, - ..Default::default() - }), + message: Some(ParsedCommitMessage::parse( + details.sha.to_string(), + details.message.to_string(), + remote_url.as_deref(), + provider_registry, + )), }; this.update(cx, |this: &mut GitPanelMessageTooltip, cx| { @@ -4554,10 +5717,14 @@ 
impl RenderOnce for PanelRepoFooter { .as_ref() .map(|panel| panel.read(cx).project.clone()); - let repo = self + let (workspace, repo) = self .git_panel .as_ref() - .and_then(|panel| panel.read(cx).active_repository.clone()); + .map(|panel| { + let panel = panel.read(cx); + (panel.workspace.clone(), panel.active_repository.clone()) + }) + .unzip(); let single_repo = project .as_ref() @@ -4568,7 +5735,6 @@ impl RenderOnce for PanelRepoFooter { const MAX_REPO_LEN: usize = 16; const LABEL_CHARACTER_BUDGET: usize = MAX_BRANCH_LEN + MAX_REPO_LEN; const MAX_SHORT_SHA_LEN: usize = 8; - let branch_name = self .branch .as_ref() @@ -4646,7 +5812,11 @@ impl RenderOnce for PanelRepoFooter { }); let branch_selector = PopoverMenu::new("popover-button") - .menu(move |window, cx| Some(branch_picker::popover(repo.clone(), window, cx))) + .menu(move |window, cx| { + let workspace = workspace.clone()?; + let repo = repo.clone().flatten(); + Some(branch_picker::popover(workspace, repo, window, cx)) + }) .trigger_with_tooltip( branch_selector_button, Tooltip::for_action_title("Switch Branch", &zed_actions::git::Switch), @@ -4949,6 +6119,63 @@ impl Component for PanelRepoFooter { } } +fn open_output( + operation: impl Into, + workspace: &mut Workspace, + output: &str, + window: &mut Window, + cx: &mut Context, +) { + let operation = operation.into(); + let buffer = cx.new(|cx| Buffer::local(output, cx)); + buffer.update(cx, |buffer, cx| { + buffer.set_capability(language::Capability::ReadOnly, cx); + }); + let editor = cx.new(|cx| { + let mut editor = Editor::for_buffer(buffer, None, window, cx); + editor.buffer().update(cx, |buffer, cx| { + buffer.set_title(format!("Output from git {operation}"), cx); + }); + editor.set_read_only(true); + editor + }); + + workspace.add_item_to_center(Box::new(editor), window, cx); +} + +pub(crate) fn show_error_toast( + workspace: Entity, + action: impl Into, + e: anyhow::Error, + cx: &mut App, +) { + let action = action.into(); + let message = e.to_string().trim().to_string(); + if message + .matches(git::repository::REMOTE_CANCELLED_BY_USER) + .next() + .is_some() + { // Hide the cancelled by user message + } else { + workspace.update(cx, |workspace, cx| { + let workspace_weak = cx.weak_entity(); + let toast = StatusToast::new(format!("git {} failed", action), cx, |this, _cx| { + this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) + .action("View Log", move |window, cx| { + let message = message.clone(); + let action = action.clone(); + workspace_weak + .update(cx, move |workspace, cx| { + open_output(action, workspace, &message, window, cx) + }) + .ok(); + }) + }); + workspace.toggle_status_toast(toast, cx) + }); + } +} + #[cfg(test)] mod tests { use git::{ @@ -4956,6 +6183,7 @@ mod tests { status::{StatusCode, UnmergedStatus, UnmergedStatusCode}, }; use gpui::{TestAppContext, UpdateGlobal, VisualTestContext}; + use indoc::indoc; use project::FakeFs; use serde_json::json; use settings::SettingsStore; @@ -5579,6 +6807,94 @@ mod tests { }); } + #[gpui::test] + async fn test_amend(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "project": { + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + } + }), + ) + .await; + + fs.set_status_for_repo( + Path::new(path!("/root/project/.git")), + &[("src/main.rs", StatusCode::Modified.worktree())], + ); + + let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await; + let workspace = + 
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + // Wait for the project scanning to finish so that `head_commit(cx)` is + // actually set, otherwise no head commit would be available from which + // to fetch the latest commit message from. + cx.executor().run_until_parked(); + + let panel = workspace.update(cx, GitPanel::new).unwrap(); + panel.read_with(cx, |panel, cx| { + assert!(panel.active_repository.is_some()); + assert!(panel.head_commit(cx).is_some()); + }); + + panel.update_in(cx, |panel, window, cx| { + // Update the commit editor's message to ensure that its contents + // are later restored, after amending is finished. + panel.commit_message_buffer(cx).update(cx, |buffer, cx| { + buffer.set_text("refactor: update main.rs", cx); + }); + + // Start amending the previous commit. + panel.focus_editor(&Default::default(), window, cx); + panel.on_amend(&Amend, window, cx); + }); + + // Since `GitPanel.amend` attempts to fetch the latest commit message in + // a background task, we need to wait for it to complete before being + // able to assert that the commit message editor's state has been + // updated. + cx.run_until_parked(); + + panel.update_in(cx, |panel, window, cx| { + assert_eq!( + panel.commit_message_buffer(cx).read(cx).text(), + "initial commit" + ); + assert_eq!( + panel.original_commit_message, + Some("refactor: update main.rs".to_string()) + ); + + // Finish amending the previous commit. + panel.focus_editor(&Default::default(), window, cx); + panel.on_amend(&Amend, window, cx); + }); + + // Since the actual commit logic is run in a background task, we need to + // await its completion to actually ensure that the commit message + // editor's contents are set to the original message and haven't been + // cleared. + cx.run_until_parked(); + + panel.update_in(cx, |panel, _window, cx| { + // After amending, the commit editor's message should be restored to + // the original message. + assert_eq!( + panel.commit_message_buffer(cx).read(cx).text(), + "refactor: update main.rs" + ); + assert!(panel.original_commit_message.is_none()); + }); + } + #[gpui::test] async fn test_open_diff(cx: &mut TestAppContext) { init_test(cx); @@ -5625,7 +6941,7 @@ mod tests { // the Project Diff's active path. 
panel.update_in(cx, |panel, window, cx| { panel.selected_entry = Some(1); - panel.open_diff(&Confirm, window, cx); + panel.open_diff(&menu::Confirm, window, cx); }); cx.run_until_parked(); @@ -5641,6 +6957,128 @@ mod tests { }); } + #[gpui::test] + async fn test_tree_view_reveals_collapsed_parent_on_select_entry_by_path( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "src": { + "a": { + "foo.rs": "fn foo() {}", + }, + "b": { + "bar.rs": "fn bar() {}", + }, + }, + }), + ) + .await; + + fs.set_status_for_repo( + path!("/project/.git").as_ref(), + &[ + ("src/a/foo.rs", StatusCode::Modified.worktree()), + ("src/b/bar.rs", StatusCode::Modified.worktree()), + ], + ); + + let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await; + let workspace = + cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + cx.read(|cx| { + project + .read(cx) + .worktrees(cx) + .next() + .unwrap() + .read(cx) + .as_local() + .unwrap() + .scan_complete() + }) + .await; + + cx.executor().run_until_parked(); + + cx.update(|_window, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.git_panel.get_or_insert_default().tree_view = Some(true); + }) + }); + }); + + let panel = workspace.update(cx, GitPanel::new).unwrap(); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + let src_key = panel.read_with(cx, |panel, _| { + panel + .entries + .iter() + .find_map(|entry| match entry { + GitListEntry::Directory(dir) if dir.key.path == repo_path("src") => { + Some(dir.key.clone()) + } + _ => None, + }) + .expect("src directory should exist in tree view") + }); + + panel.update_in(cx, |panel, window, cx| { + panel.toggle_directory(&src_key, window, cx); + }); + + panel.read_with(cx, |panel, _| { + let state = panel + .view_mode + .tree_state() + .expect("tree view state should exist"); + assert_eq!(state.expanded_dirs.get(&src_key).copied(), Some(false)); + }); + + let worktree_id = + cx.read(|cx| project.read(cx).worktrees(cx).next().unwrap().read(cx).id()); + let project_path = ProjectPath { + worktree_id, + path: RelPath::unix("src/a/foo.rs").unwrap().into_arc(), + }; + + panel.update_in(cx, |panel, window, cx| { + panel.select_entry_by_path(project_path, window, cx); + }); + + panel.read_with(cx, |panel, _| { + let state = panel + .view_mode + .tree_state() + .expect("tree view state should exist"); + assert_eq!(state.expanded_dirs.get(&src_key).copied(), Some(true)); + + let selected_ix = panel.selected_entry.expect("selection should be set"); + assert!(state.logical_indices.contains(&selected_ix)); + + let selected_entry = panel + .entries + .get(selected_ix) + .and_then(|entry| entry.status_entry()) + .expect("selected entry should be a status entry"); + assert_eq!(selected_entry.repo_path, repo_path("src/a/foo.rs")); + }); + } + fn assert_entry_paths(entries: &[GitListEntry], expected_paths: &[Option<&str>]) { assert_eq!(entries.len(), expected_paths.len()); for (entry, expected_path) in entries.iter().zip(expected_paths) { @@ -5655,4 +7093,257 @@ mod tests { ); } } + + #[test] + fn test_compress_diff_no_truncation() { + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + -old + +new + "}; + let result = GitPanel::compress_commit_diff(diff, 1000); + assert_eq!(result, diff); + } + + #[test] + fn test_compress_diff_truncate_long_lines() { + let long_line = "🦀".repeat(300); + let diff = indoc::formatdoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + +{} + more context + ", long_line}; + let result = GitPanel::compress_commit_diff(&diff, 100); + assert!(result.contains("...[truncated]")); + assert!(result.len() < diff.len()); + } + + #[test] + fn test_compress_diff_truncate_hunks() { + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + context + -old1 + +new1 + @@ -5,2 +5,2 @@ + context 2 + -old2 + +new2 + @@ -10,2 +10,2 @@ + context 3 + -old3 + +new3 + "}; + let result = GitPanel::compress_commit_diff(diff, 100); + let expected = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + context + -old1 + +new1 + [...skipped 2 hunks...] + "}; + assert_eq!(result, expected); + } + + #[gpui::test] + async fn test_suggest_commit_message(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "tracked": "tracked\n", + "untracked": "\n", + }), + ) + .await; + + fs.set_head_and_index_for_repo( + path!("/project/.git").as_ref(), + &[("tracked", "old tracked\n".into())], + ); + + let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await; + let workspace = + cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace.update(cx, GitPanel::new).unwrap(); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + let entries = panel.read_with(cx, |panel, _| panel.entries.clone()); + + // GitPanel + // - Tracked: + // - [] tracked + // - Untracked + // - [] untracked + // + // The commit message should now read: + // "Update tracked" + let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); + assert_eq!(message, Some("Update tracked".to_string())); + + let first_status_entry = entries[1].clone(); + panel.update_in(cx, |panel, window, cx| { + panel.toggle_staged_for_entry(&first_status_entry, window, cx); + }); + + cx.read(|cx| { + project + .read(cx) + .worktrees(cx) + .next() + .unwrap() + .read(cx) + .as_local() + .unwrap() + .scan_complete() + }) + .await; + + cx.executor().run_until_parked(); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + // GitPanel + // - Tracked: + // - [x] tracked + // - Untracked + // - [] untracked + // + // The commit message should still read: + // "Update tracked" + let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); + assert_eq!(message, Some("Update tracked".to_string())); + + let second_status_entry = entries[3].clone(); + panel.update_in(cx, |panel, window, cx| { + panel.toggle_staged_for_entry(&second_status_entry, window, cx); + }); + + cx.read(|cx| { + project + .read(cx) + .worktrees(cx) + .next() + .unwrap() + .read(cx) + .as_local() + .unwrap() + .scan_complete() + }) + .await; + + cx.executor().run_until_parked(); + 
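// Aside on the repeated pattern below (not part of the patch): swapping a
// `Task::ready(())` into `update_visible_entries_task` hands the original
// debounced task back as `handle`; advancing the fake clock past
// `UPDATE_DEBOUNCE` lets that task fire, and awaiting `handle` ensures the
// panel's visible entries are rebuilt before the assertions that follow.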
+ let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + // GitPanel + // - Tracked: + // - [x] tracked + // - Untracked + // - [x] untracked + // + // The commit message should now read: + // "Enter commit message" + // (which means we should see None returned). + let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); + assert!(message.is_none()); + + panel.update_in(cx, |panel, window, cx| { + panel.toggle_staged_for_entry(&first_status_entry, window, cx); + }); + + cx.read(|cx| { + project + .read(cx) + .worktrees(cx) + .next() + .unwrap() + .read(cx) + .as_local() + .unwrap() + .scan_complete() + }) + .await; + + cx.executor().run_until_parked(); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + // GitPanel + // - Tracked: + // - [] tracked + // - Untracked + // - [x] untracked + // + // The commit message should now read: + // "Update untracked" + let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); + assert_eq!(message, Some("Create untracked".to_string())); + + panel.update_in(cx, |panel, window, cx| { + panel.toggle_staged_for_entry(&second_status_entry, window, cx); + }); + + cx.read(|cx| { + project + .read(cx) + .worktrees(cx) + .next() + .unwrap() + .read(cx) + .as_local() + .unwrap() + .scan_complete() + }) + .await; + + cx.executor().run_until_parked(); + + let handle = cx.update_window_entity(&panel, |panel, _, _| { + std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(())) + }); + cx.executor().advance_clock(2 * UPDATE_DEBOUNCE); + handle.await; + + // GitPanel + // - Tracked: + // - [] tracked + // - Untracked + // - [] untracked + // + // The commit message should now read: + // "Update tracked" + let message = panel.update(cx, |panel, cx| panel.suggest_commit_message(cx)); + assert_eq!(message, Some("Update tracked".to_string())); + } } diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index 2a6c1e8882b3f9cce02060dbf8efb6a4826b6995..6b5334e55544b465864fe3afb780c4673bb5961e 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -24,6 +24,7 @@ pub struct GitPanelSettings { pub fallback_branch_name: String, pub sort_by_path: bool, pub collapse_untracked_diff: bool, + pub tree_view: bool, } impl ScrollbarVisibility for GitPanelSettings { @@ -56,6 +57,7 @@ impl Settings for GitPanelSettings { fallback_branch_name: git_panel.fallback_branch_name.unwrap(), sort_by_path: git_panel.sort_by_path.unwrap(), collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(), + tree_view: git_panel.tree_view.unwrap(), } } } diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 5b8b721d8ecf4243faa964221cff0e7b3e678437..053c41bf10c5d97f9f5326fd17d6b5bf91297a03 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -3,6 +3,7 @@ use std::any::Any; use command_palette_hooks::CommandPaletteFilter; use commit_modal::CommitModal; use editor::{Editor, actions::DiffClipboardWithSelectionData}; +use project::ProjectPath; use ui::{ Headline, HeadlineSize, Icon, IconName, IconSize, IntoElement, ParentElement, Render, Styled, StyledExt, div, h_flex, rems, v_flex, @@ -36,6 
+37,7 @@ pub mod commit_tooltip; pub mod commit_view; mod conflict_view; pub mod file_diff_view; +pub mod file_history_view; pub mod git_panel; mod git_panel_settings; pub mod onboarding; @@ -58,6 +60,7 @@ actions!( pub fn init(cx: &mut App) { editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx); commit_view::init(cx); + file_history_view::init(cx); cx.observe_new(|editor: &mut Editor, _, cx| { conflict_view::register_editor(editor, editor.buffer().clone(), cx); @@ -228,6 +231,41 @@ pub fn init(cx: &mut App) { }; }, ); + workspace.register_action(|workspace, _: &git::FileHistory, window, cx| { + let Some(active_item) = workspace.active_item(cx) else { + return; + }; + let Some(editor) = active_item.downcast::() else { + return; + }; + let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() else { + return; + }; + let Some(file) = buffer.read(cx).file() else { + return; + }; + let worktree_id = file.worktree_id(cx); + let project_path = ProjectPath { + worktree_id, + path: file.path().clone(), + }; + let project = workspace.project(); + let git_store = project.read(cx).git_store(); + let Some((repo, repo_path)) = git_store + .read(cx) + .repository_and_path_for_project_path(&project_path, cx) + else { + return; + }; + file_history_view::FileHistoryView::open( + repo_path, + git_store.downgrade(), + repo.downgrade(), + workspace.weak_handle(), + window, + cx, + ); + }); }) .detach(); } @@ -780,7 +818,7 @@ impl GitCloneModal { }); let focus_handle = repo_input.focus_handle(cx); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); Self { panel, diff --git a/crates/git_ui/src/onboarding.rs b/crates/git_ui/src/onboarding.rs index d1709e043b92216e974c1a4f451db5c28b98f773..eccb18a5400647ff86e44f4426d271d6c9361164 100644 --- a/crates/git_ui/src/onboarding.rs +++ b/crates/git_ui/src/onboarding.rs @@ -85,8 +85,8 @@ impl Render for GitOnboardingModal { git_onboarding_event!("Cancelled", trigger = "Action"); cx.emit(DismissEvent); })) - .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| { - this.focus_handle.focus(window); + .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, cx| { + this.focus_handle.focus(window, cx); })) .child( div().p_1p5().absolute().inset_0().h(px(160.)).child( diff --git a/crates/git_ui/src/picker_prompt.rs b/crates/git_ui/src/picker_prompt.rs index 6161c62af571f3a90c3110d63cc26ea3a7e032ae..14daedda61ecc71cebe8f7778fee2f8193e65a73 100644 --- a/crates/git_ui/src/picker_prompt.rs +++ b/crates/git_ui/src/picker_prompt.rs @@ -220,7 +220,7 @@ impl PickerDelegate for PickerPromptDelegate { let shortened_option = util::truncate_and_trailoff(&hit.string, self.max_match_length); Some( - ListItem::new(SharedString::from(format!("picker-prompt-menu-{ix}"))) + ListItem::new(format!("picker-prompt-menu-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 6f8195c8b718640de4fed421253d5f1bd2f8f14e..0e0632d9d049f54a648f65c55a96d639c9103e4d 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -8,7 +8,7 @@ use anyhow::{Context as _, Result, anyhow}; use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus}; use collections::{HashMap, HashSet}; use editor::{ - Addon, Editor, EditorEvent, SelectionEffects, + Addon, Editor, EditorEvent, SelectionEffects, SplittableEditor, actions::{GoToHunk, GoToPreviousHunk}, multibuffer_context_lines, scroll::Autoscroll, @@ -19,7 +19,7 @@ use git::{ 
status::FileStatus, }; use gpui::{ - Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity, EventEmitter, + Action, AnyElement, App, AppContext as _, AsyncWindowContext, Entity, EventEmitter, FocusHandle, Focusable, Render, Subscription, Task, WeakEntity, actions, }; use language::{Anchor, Buffer, Capability, OffsetRangeExt}; @@ -32,8 +32,8 @@ use project::{ }, }; use settings::{Settings, SettingsStore}; +use smol::future::yield_now; use std::any::{Any, TypeId}; -use std::ops::Range; use std::sync::Arc; use theme::ActiveTheme; use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider}; @@ -45,6 +45,7 @@ use workspace::{ notifications::NotifyTaskExt, searchable::SearchableItemHandle, }; +use ztracing::instrument; actions!( git, @@ -55,7 +56,8 @@ actions!( Add, /// Shows the diff between the working directory and your default /// branch (typically main or master). - BranchDiff + BranchDiff, + LeaderAndFollower, ] ); @@ -63,7 +65,7 @@ pub struct ProjectDiff { project: Entity, multibuffer: Entity, branch_diff: Entity, - editor: Entity, + editor: Entity, buffer_diff_subscriptions: HashMap, (Entity, Subscription)>, workspace: WeakEntity, focus_handle: FocusHandle, @@ -72,6 +74,13 @@ pub struct ProjectDiff { _subscription: Subscription, } +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum RefreshReason { + DiffChanged, + StatusesChanged, + EditorSaved, +} + const CONFLICT_SORT_PREFIX: u64 = 1; const TRACKED_SORT_PREFIX: u64 = 2; const NEW_SORT_PREFIX: u64 = 3; @@ -147,6 +156,10 @@ impl ProjectDiff { .items_of_type::(cx) .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Head)); let project_diff = if let Some(existing) = existing { + existing.update(cx, |project_diff, cx| { + project_diff.move_to_beginning(window, cx); + }); + workspace.activate_item(&existing, true, true, window, cx); existing } else { @@ -171,7 +184,9 @@ impl ProjectDiff { pub fn autoscroll(&self, cx: &mut Context) { self.editor.update(cx, |editor, cx| { - editor.request_autoscroll(Autoscroll::fit(), cx); + editor.primary_editor().update(cx, |editor, cx| { + editor.request_autoscroll(Autoscroll::fit(), cx); + }) }) } @@ -225,44 +240,44 @@ impl ProjectDiff { cx: &mut Context, ) -> Self { let focus_handle = cx.focus_handle(); - let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_all_diff_hunks_expanded(cx); + multibuffer + }); let editor = cx.new(|cx| { - let mut diff_display_editor = - Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx); - diff_display_editor.disable_diagnostics(cx); - diff_display_editor.set_expand_all_diff_hunks(cx); - - match branch_diff.read(cx).diff_base() { - DiffBase::Head => { - diff_display_editor.register_addon(GitPanelAddon { - workspace: workspace.downgrade(), - }); - } - DiffBase::Merge { .. 
} => { - diff_display_editor.register_addon(BranchDiffAddon { - branch_diff: branch_diff.clone(), - }); - diff_display_editor.start_temporary_diff_override(); - diff_display_editor.set_render_diff_hunk_controls( - Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()), - cx, - ); - // - } - } + let diff_display_editor = SplittableEditor::new_unsplit( + multibuffer.clone(), + project.clone(), + workspace.clone(), + window, + cx, + ); diff_display_editor - }); - window.defer(cx, { - let workspace = workspace.clone(); - let editor = editor.clone(); - move |window, cx| { - workspace.update(cx, |workspace, cx| { - editor.update(cx, |editor, cx| { - editor.added_to_workspace(workspace, window, cx); - }) + .primary_editor() + .update(cx, |editor, cx| { + editor.disable_diagnostics(cx); + + match branch_diff.read(cx).diff_base() { + DiffBase::Head => { + editor.register_addon(GitPanelAddon { + workspace: workspace.downgrade(), + }); + } + DiffBase::Merge { .. } => { + editor.register_addon(BranchDiffAddon { + branch_diff: branch_diff.clone(), + }); + editor.start_temporary_diff_override(); + editor.set_render_diff_hunk_controls( + Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()), + cx, + ); + } + } }); - } + diff_display_editor }); cx.subscribe_in(&editor, window, Self::handle_editor_event) .detach(); @@ -274,7 +289,7 @@ impl ProjectDiff { BranchDiffEvent::FileListChanged => { this._task = window.spawn(cx, { let this = cx.weak_entity(); - async |cx| Self::refresh(this, cx).await + async |cx| Self::refresh(this, RefreshReason::StatusesChanged, cx).await }) } }, @@ -293,7 +308,7 @@ impl ProjectDiff { this._task = { window.spawn(cx, { let this = cx.weak_entity(); - async |cx| Self::refresh(this, cx).await + async |cx| Self::refresh(this, RefreshReason::StatusesChanged, cx).await }) } } @@ -304,7 +319,7 @@ impl ProjectDiff { let task = window.spawn(cx, { let this = cx.weak_entity(); - async |cx| Self::refresh(this, cx).await + async |cx| Self::refresh(this, RefreshReason::StatusesChanged, cx).await }); Self { @@ -342,7 +357,7 @@ impl ProjectDiff { } pub fn active_path(&self, cx: &App) -> Option { - let editor = self.editor.read(cx); + let editor = self.editor.read(cx).last_selected_editor().read(cx); let position = editor.selections.newest_anchor().head(); let multi_buffer = editor.buffer().read(cx); let (_, buffer, _) = multi_buffer.excerpt_containing(position, cx)?; @@ -354,17 +369,27 @@ impl ProjectDiff { }) } + fn move_to_beginning(&mut self, window: &mut Window, cx: &mut Context) { + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |editor, cx| { + editor.move_to_beginning(&Default::default(), window, cx); + }); + }); + } + fn move_to_path(&mut self, path_key: PathKey, window: &mut Window, cx: &mut Context) { if let Some(position) = self.multibuffer.read(cx).location_for_path(&path_key, cx) { self.editor.update(cx, |editor, cx| { - editor.change_selections( - SelectionEffects::scroll(Autoscroll::focused()), - window, - cx, - |s| { - s.select_ranges([position..position]); - }, - ) + editor.primary_editor().update(cx, |editor, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::focused()), + window, + cx, + |s| { + s.select_ranges([position..position]); + }, + ) + }) }); } else { self.pending_scroll = Some(path_key); @@ -372,7 +397,7 @@ impl ProjectDiff { } fn button_states(&self, cx: &App) -> ButtonStates { - let editor = self.editor.read(cx); + let editor = self.editor.read(cx).primary_editor().read(cx); let snapshot = 
self.multibuffer.read(cx).snapshot(cx); let prev_next = snapshot.diff_hunks().nth(1).is_some(); let mut selection = true; @@ -383,12 +408,14 @@ impl ProjectDiff { .collect::>(); if !ranges.iter().any(|range| range.start != range.end) { selection = false; - if let Some((excerpt_id, buffer, range)) = self.editor.read(cx).active_excerpt(cx) { - ranges = vec![multi_buffer::Anchor::range_in_buffer( - excerpt_id, - buffer.read(cx).remote_id(), - range, - )]; + if let Some((excerpt_id, _, range)) = self + .editor + .read(cx) + .primary_editor() + .read(cx) + .active_excerpt(cx) + { + ranges = vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)]; } else { ranges = Vec::default(); } @@ -435,32 +462,41 @@ impl ProjectDiff { fn handle_editor_event( &mut self, - editor: &Entity, + editor: &Entity, event: &EditorEvent, window: &mut Window, cx: &mut Context, ) { - if let EditorEvent::SelectionsChanged { local: true } = event { - let Some(project_path) = self.active_path(cx) else { - return; - }; - self.workspace - .update(cx, |workspace, cx| { - if let Some(git_panel) = workspace.panel::(cx) { - git_panel.update(cx, |git_panel, cx| { - git_panel.select_entry_by_path(project_path, window, cx) - }) - } - }) - .ok(); + match event { + EditorEvent::SelectionsChanged { local: true } => { + let Some(project_path) = self.active_path(cx) else { + return; + }; + self.workspace + .update(cx, |workspace, cx| { + if let Some(git_panel) = workspace.panel::(cx) { + git_panel.update(cx, |git_panel, cx| { + git_panel.select_entry_by_path(project_path, window, cx) + }) + } + }) + .ok(); + } + EditorEvent::Saved => { + self._task = cx.spawn_in(window, async move |this, cx| { + Self::refresh(this, RefreshReason::EditorSaved, cx).await + }); + } + _ => {} } if editor.focus_handle(cx).contains_focused(window, cx) && self.multibuffer.read(cx).is_empty() { - self.focus_handle.focus(window) + self.focus_handle.focus(window, cx) } } + #[instrument(skip_all)] fn register_buffer( &mut self, path_key: PathKey, @@ -473,33 +509,47 @@ impl ProjectDiff { let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| { this._task = window.spawn(cx, { let this = cx.weak_entity(); - async |cx| Self::refresh(this, cx).await + async |cx| Self::refresh(this, RefreshReason::DiffChanged, cx).await }) }); self.buffer_diff_subscriptions .insert(path_key.path.clone(), (diff.clone(), subscription)); + // TODO(split-diff) we shouldn't have a conflict addon when split let conflict_addon = self .editor .read(cx) + .primary_editor() + .read(cx) .addon::() .expect("project diff editor should have a conflict addon"); let snapshot = buffer.read(cx).snapshot(); let diff_read = diff.read(cx); - let diff_hunk_ranges = diff_read - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx) - .map(|diff_hunk| diff_hunk.buffer_range); - let conflicts = conflict_addon - .conflict_set(snapshot.remote_id()) - .map(|conflict_set| conflict_set.read(cx).snapshot().conflicts) - .unwrap_or_default(); - let conflicts = conflicts.iter().map(|conflict| conflict.range.clone()); - - let excerpt_ranges = - merge_anchor_ranges(diff_hunk_ranges.into_iter(), conflicts, &snapshot) - .map(|range| range.to_point(&snapshot)) - .collect::>(); + + let excerpt_ranges = { + let diff_hunk_ranges = diff_read + .hunks_intersecting_range( + Anchor::min_max_range_for_buffer(diff_read.buffer_id), + &snapshot, + cx, + ) + .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)); + let conflicts = conflict_addon + .conflict_set(snapshot.remote_id()) + 
.map(|conflict_set| conflict_set.read(cx).snapshot().conflicts) + .unwrap_or_default(); + let mut conflicts = conflicts + .iter() + .map(|conflict| conflict.range.to_point(&snapshot)) + .peekable(); + + if conflicts.peek().is_some() { + conflicts.collect::>() + } else { + diff_hunk_ranges.collect() + } + }; let (was_empty, is_excerpt_newly_added) = self.multibuffer.update(cx, |multibuffer, cx| { let was_empty = multibuffer.is_empty(); @@ -517,19 +567,27 @@ impl ProjectDiff { }); self.editor.update(cx, |editor, cx| { - if was_empty { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - // TODO select the very beginning (possibly inside a deletion) - selections.select_ranges([0..0]) - }); - } - if is_excerpt_newly_added - && (file_status.is_deleted() - || (file_status.is_untracked() - && GitPanelSettings::get_global(cx).collapse_untracked_diff)) - { - editor.fold_buffer(snapshot.text.remote_id(), cx) - } + editor.primary_editor().update(cx, |editor, cx| { + if was_empty { + editor.change_selections( + SelectionEffects::no_scroll(), + window, + cx, + |selections| { + selections.select_ranges([ + multi_buffer::Anchor::min()..multi_buffer::Anchor::min() + ]) + }, + ); + } + if is_excerpt_newly_added + && (file_status.is_deleted() + || (file_status.is_untracked() + && GitPanelSettings::get_global(cx).collapse_untracked_diff)) + { + editor.fold_buffer(snapshot.text.remote_id(), cx) + } + }) }); if self.multibuffer.read(cx).is_empty() @@ -539,10 +597,10 @@ impl ProjectDiff { .focus_handle(cx) .contains_focused(window, cx) { - self.focus_handle.focus(window); + self.focus_handle.focus(window, cx); } else if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() { self.editor.update(cx, |editor, cx| { - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); }); } if self.pending_scroll.as_ref() == Some(&path_key) { @@ -550,14 +608,23 @@ impl ProjectDiff { } } - pub async fn refresh(this: WeakEntity, cx: &mut AsyncWindowContext) -> Result<()> { + pub async fn refresh( + this: WeakEntity, + reason: RefreshReason, + cx: &mut AsyncWindowContext, + ) -> Result<()> { let mut path_keys = Vec::new(); let buffers_to_load = this.update(cx, |this, cx| { let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| { let load_buffers = branch_diff.load_buffers(cx); (branch_diff.repo().cloned(), load_buffers) }); - let mut previous_paths = this.multibuffer.read(cx).paths().collect::>(); + let mut previous_paths = this + .multibuffer + .read(cx) + .paths() + .cloned() + .collect::>(); if let Some(repo) = repo { let repo = repo.read(cx); @@ -574,8 +641,20 @@ impl ProjectDiff { this.multibuffer.update(cx, |multibuffer, cx| { for path in previous_paths { + if let Some(buffer) = multibuffer.buffer_for_path(&path, cx) { + let skip = match reason { + RefreshReason::DiffChanged | RefreshReason::EditorSaved => { + buffer.read(cx).is_dirty() + } + RefreshReason::StatusesChanged => false, + }; + if skip { + continue; + } + } + this.buffer_diff_subscriptions.remove(&path.path); - multibuffer.remove_excerpts_for_path(path, cx); + multibuffer.remove_excerpts_for_path(path.clone(), cx); } }); buffers_to_load @@ -583,9 +662,32 @@ impl ProjectDiff { for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) { if let Some((buffer, diff)) = entry.load.await.log_err() { + // We might be lagging behind enough that all future entry.load futures are no longer pending. 
+ // If that is the case, this task will never yield, starving the foreground thread of execution time. + yield_now().await; cx.update(|window, cx| { this.update(cx, |this, cx| { - this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx) + let multibuffer = this.multibuffer.read(cx); + let skip = multibuffer.buffer(buffer.read(cx).remote_id()).is_some() + && multibuffer + .diff_for(buffer.read(cx).remote_id()) + .is_some_and(|prev_diff| prev_diff.entity_id() == diff.entity_id()) + && match reason { + RefreshReason::DiffChanged | RefreshReason::EditorSaved => { + buffer.read(cx).is_dirty() + } + RefreshReason::StatusesChanged => false, + }; + if !skip { + this.register_buffer( + path_key, + entry.file_status, + buffer, + diff, + window, + cx, + ) + } }) .ok(); })?; @@ -603,14 +705,17 @@ impl ProjectDiff { pub fn excerpt_paths(&self, cx: &App) -> Vec> { self.multibuffer .read(cx) - .excerpt_paths() + .paths() .map(|key| key.path.clone()) .collect() } } fn sort_prefix(repo: &Repository, repo_path: &RepoPath, status: FileStatus, cx: &App) -> u64 { - if GitPanelSettings::get_global(cx).sort_by_path { + let settings = GitPanelSettings::get_global(cx); + + // Tree view can only sort by path + if settings.sort_by_path || settings.tree_view { TRACKED_SORT_PREFIX } else if repo.had_conflict_on_last_merge_head_change(repo_path) { CONFLICT_SORT_PREFIX @@ -645,8 +750,11 @@ impl Item for ProjectDiff { } fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { - self.editor - .update(cx, |editor, cx| editor.deactivated(window, cx)); + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |primary_editor, cx| { + primary_editor.deactivated(window, cx); + }) + }); } fn navigate( @@ -655,8 +763,11 @@ impl Item for ProjectDiff { window: &mut Window, cx: &mut Context, ) -> bool { - self.editor - .update(cx, |editor, cx| editor.navigate(data, window, cx)) + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |primary_editor, cx| { + primary_editor.navigate(data, window, cx) + }) + }) } fn tab_tooltip_text(&self, _: &App) -> Option { @@ -684,8 +795,9 @@ impl Item for ProjectDiff { Some("Project Diff Opened") } - fn as_searchable(&self, _: &Entity) -> Option> { - Some(Box::new(self.editor.clone())) + fn as_searchable(&self, _: &Entity, cx: &App) -> Option> { + // TODO(split-diff) SplitEditor should be searchable + Some(Box::new(self.editor.read(cx).primary_editor().clone())) } fn for_each_project_item( @@ -693,7 +805,11 @@ impl Item for ProjectDiff { cx: &App, f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), ) { - self.editor.for_each_project_item(cx, f) + self.editor + .read(cx) + .primary_editor() + .read(cx) + .for_each_project_item(cx, f) } fn set_nav_history( @@ -702,8 +818,10 @@ impl Item for ProjectDiff { _: &mut Window, cx: &mut Context, ) { - self.editor.update(cx, |editor, _| { - editor.set_nav_history(Some(nav_history)); + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |primary_editor, _| { + primary_editor.set_nav_history(Some(nav_history)); + }) }); } @@ -747,7 +865,11 @@ impl Item for ProjectDiff { window: &mut Window, cx: &mut Context, ) -> Task> { - self.editor.save(options, project, window, cx) + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |primary_editor, cx| { + primary_editor.save(options, project, window, cx) + }) + }) } fn save_as( @@ -766,19 +888,23 @@ impl Item for ProjectDiff { window: &mut Window, cx: &mut Context, ) -> Task> { - 
self.editor.reload(project, window, cx) + self.editor.update(cx, |editor, cx| { + editor.primary_editor().update(cx, |primary_editor, cx| { + primary_editor.reload(project, window, cx) + }) + }) } fn act_as_type<'a>( &'a self, type_id: TypeId, self_handle: &'a Entity, - _: &'a App, - ) -> Option { + cx: &'a App, + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) + Some(self.editor.read(cx).primary_editor().clone().into()) } else { None } @@ -789,7 +915,11 @@ impl Item for ProjectDiff { } fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { - self.editor.breadcrumbs(theme, cx) + self.editor + .read(cx) + .last_selected_editor() + .read(cx) + .breadcrumbs(theme, cx) } fn added_to_workspace( @@ -853,7 +983,7 @@ impl Render for ProjectDiff { cx, )) .on_click(move |_, window, cx| { - window.focus(&keybinding_focus_handle); + window.focus(&keybinding_focus_handle, cx); window.dispatch_action( Box::new(CloseActiveItem::default()), cx, @@ -1023,7 +1153,7 @@ impl ProjectDiffToolbar { fn dispatch_action(&self, action: &dyn Action, window: &mut Window, cx: &mut Context) { if let Some(project_diff) = self.project_diff(cx) { - project_diff.focus_handle(cx).focus(window); + project_diff.focus_handle(cx).focus(window, cx); } let action = action.boxed_clone(); cx.defer(move |cx| { @@ -1491,53 +1621,6 @@ mod preview { } } -fn merge_anchor_ranges<'a>( - left: impl 'a + Iterator>, - right: impl 'a + Iterator>, - snapshot: &'a language::BufferSnapshot, -) -> impl 'a + Iterator> { - let mut left = left.fuse().peekable(); - let mut right = right.fuse().peekable(); - - std::iter::from_fn(move || { - let Some(left_range) = left.peek() else { - return right.next(); - }; - let Some(right_range) = right.peek() else { - return left.next(); - }; - - let mut next_range = if left_range.start.cmp(&right_range.start, snapshot).is_lt() { - left.next().unwrap() - } else { - right.next().unwrap() - }; - - // Extend the basic range while there's overlap with a range from either stream. 
- loop { - if let Some(left_range) = left - .peek() - .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le()) - .cloned() - { - left.next(); - next_range.end = left_range.end; - } else if let Some(right_range) = right - .peek() - .filter(|range| range.start.cmp(&next_range.end, snapshot).is_le()) - .cloned() - { - right.next(); - next_range.end = right_range.end; - } else { - break; - } - } - - Some(next_range) - }) -} - struct BranchDiffAddon { branch_diff: Entity, } @@ -1624,7 +1707,7 @@ mod tests { ); cx.run_until_parked(); - let editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + let editor = diff.read_with(cx, |diff, cx| diff.editor.read(cx).primary_editor().clone()); assert_state_with_diff( &editor, cx, @@ -1635,9 +1718,13 @@ mod tests { .unindent(), ); - editor.update_in(cx, |editor, window, cx| { - editor.git_restore(&Default::default(), window, cx); - }); + editor + .update_in(cx, |editor, window, cx| { + editor.git_restore(&Default::default(), window, cx); + editor.save(SaveOptions::default(), project.clone(), window, cx) + }) + .await + .unwrap(); cx.run_until_parked(); assert_state_with_diff(&editor, cx, &"ˇ".unindent()); @@ -1680,7 +1767,7 @@ mod tests { window, cx, ); - diff.editor.clone() + diff.editor.read(cx).primary_editor().clone() }); assert_state_with_diff( &editor, @@ -1701,7 +1788,7 @@ mod tests { window, cx, ); - diff.editor.clone() + diff.editor.read(cx).primary_editor().clone() }); assert_state_with_diff( &editor, @@ -1754,7 +1841,8 @@ mod tests { ); cx.run_until_parked(); - let diff_editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + let diff_editor = + diff.read_with(cx, |diff, cx| diff.editor.read(cx).primary_editor().clone()); assert_state_with_diff( &diff_editor, @@ -1825,8 +1913,8 @@ mod tests { cx, &" - original - + ˇdifferent - " + + different + ˇ" .unindent(), ); } @@ -1878,7 +1966,7 @@ mod tests { workspace.active_item_as::(cx).unwrap() }); cx.focus(&item); - let editor = item.read_with(cx, |item, _| item.editor.clone()); + let editor = item.read_with(cx, |item, cx| item.editor.read(cx).primary_editor().clone()); let mut cx = EditorTestContext::for_editor_in(editor, cx).await; @@ -1992,7 +2080,7 @@ mod tests { workspace.active_item_as::(cx).unwrap() }); cx.focus(&item); - let editor = item.read_with(cx, |item, _| item.editor.clone()); + let editor = item.read_with(cx, |item, cx| item.editor.read(cx).primary_editor().clone()); let mut cx = EditorTestContext::for_editor_in(editor, cx).await; @@ -2039,7 +2127,7 @@ mod tests { cx.run_until_parked(); cx.update(|window, cx| { - let editor = diff.read(cx).editor.clone(); + let editor = diff.read(cx).editor.read(cx).primary_editor().clone(); let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids(); assert_eq!(excerpt_ids.len(), 1); let excerpt_id = excerpt_ids[0]; @@ -2056,6 +2144,8 @@ mod tests { .read(cx) .editor .read(cx) + .primary_editor() + .read(cx) .addon::() .unwrap() .conflict_set(buffer_id) @@ -2139,7 +2229,7 @@ mod tests { ); cx.run_until_parked(); - let editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + let editor = diff.read_with(cx, |diff, cx| diff.editor.read(cx).primary_editor().clone()); assert_state_with_diff( &editor, @@ -2250,7 +2340,7 @@ mod tests { ); cx.run_until_parked(); - let editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + let editor = diff.read_with(cx, |diff, cx| diff.editor.read(cx).primary_editor().clone()); assert_state_with_diff( &editor, @@ -2344,7 +2434,7 @@ mod tests { workspace.active_item_as::(cx).unwrap() }); 
cx.focus(&item); - let editor = item.read_with(cx, |item, _| item.editor.clone()); + let editor = item.read_with(cx, |item, cx| item.editor.read(cx).primary_editor().clone()); fs.set_head_and_index_for_repo( Path::new(path!("/project/.git")), diff --git a/crates/git_ui/src/remote_output.rs b/crates/git_ui/src/remote_output.rs index 8437bf0d0d37c2b2767624110fed056bbae25d05..7fe863ee29df20ca0f61cef5bf64cdae4b198c7a 100644 --- a/crates/git_ui/src/remote_output.rs +++ b/crates/git_ui/src/remote_output.rs @@ -1,4 +1,5 @@ use anyhow::Context as _; + use git::repository::{Remote, RemoteCommandOutput}; use linkify::{LinkFinder, LinkKind}; use ui::SharedString; diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index d25117e3806ff0bdf73985eb60ee1d8f5b373752..6d0a9d291e4a8c7096c525b9b401e54e599b0b53 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -269,6 +269,7 @@ impl StashListDelegate { repo.downgrade(), self.workspace.clone(), Some(stash_index), + None, window, cx, ); @@ -463,7 +464,7 @@ impl PickerDelegate for StashListDelegate { ); Some( - ListItem::new(SharedString::from(format!("stash-{ix}"))) + ListItem::new(format!("stash-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 28eafaf4992667966832eaadd77a2babced7d66c..56d55415ba01f893453824be00b9eb8d6bd31a90 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -5,8 +5,8 @@ use buffer_diff::{BufferDiff, BufferDiffSnapshot}; use editor::{Editor, EditorEvent, MultiBuffer, ToPoint, actions::DiffClipboardWithSelectionData}; use futures::{FutureExt, select_biased}; use gpui::{ - AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, - FocusHandle, Focusable, IntoElement, Render, Task, Window, + AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle, + Focusable, IntoElement, Render, Task, Window, }; use language::{self, Buffer, Point}; use project::Project; @@ -170,7 +170,7 @@ impl TextDiffView { cx.subscribe(&source_buffer, move |this, _, event, _| match event { language::BufferEvent::Edited - | language::BufferEvent::LanguageChanged + | language::BufferEvent::LanguageChanged(_) | language::BufferEvent::Reparsed => { this.buffer_changes_tx.send(()).ok(); } @@ -329,17 +329,17 @@ impl Item for TextDiffView { type_id: TypeId, self_handle: &'a Entity, _: &'a App, - ) -> Option { + ) -> Option { if type_id == TypeId::of::() { - Some(self_handle.to_any()) + Some(self_handle.clone().into()) } else if type_id == TypeId::of::() { - Some(self.diff_editor.to_any()) + Some(self.diff_editor.clone().into()) } else { None } } - fn as_searchable(&self, _: &Entity) -> Option> { + fn as_searchable(&self, _: &Entity, _: &App) -> Option> { Some(Box::new(self.diff_editor.clone())) } @@ -446,7 +446,7 @@ impl Render for TextDiffView { #[cfg(test)] mod tests { use super::*; - use editor::test::editor_test_context::assert_state_with_diff; + use editor::{MultiBufferOffset, test::editor_test_context::assert_state_with_diff}; use gpui::{TestAppContext, VisualContext}; use project::{FakeFs, Project}; use serde_json::json; @@ -691,7 +691,11 @@ mod tests { let (unmarked_text, selection_ranges) = marked_text_ranges(editor_text, false); editor.set_text(unmarked_text, window, cx); editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(selection_ranges) + 
s.select_ranges( + selection_ranges + .into_iter() + .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), + ) }); editor diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index d1231b51e3a37db2b3ee2316e866fcbdbe70d459..fef5e16c80ddd26ae6dd0b2a5c0ad1d8e5b21b2c 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -1,4 +1,5 @@ use anyhow::Context as _; +use collections::HashSet; use fuzzy::StringMatchCandidate; use git::repository::Worktree as GitWorktree; @@ -9,7 +10,11 @@ use gpui::{ actions, rems, }; use picker::{Picker, PickerDelegate, PickerEditorPosition}; -use project::{DirectoryLister, git_store::Repository}; +use project::{ + DirectoryLister, + git_store::Repository, + trusted_worktrees::{PathTrust, RemoteHostLocation, TrustedWorktrees}, +}; use recent_projects::{RemoteConnectionModal, connect}; use remote::{RemoteConnectionOptions, remote_client::ConnectionIdentifier}; use std::{path::PathBuf, sync::Arc}; @@ -219,7 +224,6 @@ impl WorktreeListDelegate { window: &mut Window, cx: &mut Context>, ) { - let workspace = self.workspace.clone(); let Some(repo) = self.repo.clone() else { return; }; @@ -247,6 +251,7 @@ impl WorktreeListDelegate { let branch = worktree_branch.to_string(); let window_handle = window.window_handle(); + let workspace = self.workspace.clone(); cx.spawn_in(window, async move |_, cx| { let Some(paths) = worktree_path.await? else { return anyhow::Ok(()); @@ -257,8 +262,32 @@ impl WorktreeListDelegate { repo.create_worktree(branch.clone(), path.clone(), commit) })? .await??; - - let final_path = path.join(branch); + let new_worktree_path = path.join(branch); + + workspace.update(cx, |workspace, cx| { + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + let repo_path = &repo.read(cx).snapshot().work_directory_abs_path; + let project = workspace.project(); + if let Some((parent_worktree, _)) = + project.read(cx).find_worktree(repo_path, cx) + { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + if trusted_worktrees.can_trust(parent_worktree.read(cx).id(), cx) { + trusted_worktrees.trust( + HashSet::from_iter([PathTrust::AbsPath( + new_worktree_path.clone(), + )]), + project + .read(cx) + .remote_connection_options(cx) + .map(RemoteHostLocation::from), + cx, + ); + } + }); + } + } + })?; let (connection_options, app_state, is_local) = workspace.update(cx, |workspace, cx| { @@ -274,7 +303,7 @@ impl WorktreeListDelegate { .update_in(cx, |workspace, window, cx| { workspace.open_workspace_for_paths( replace_current_window, - vec![final_path], + vec![new_worktree_path], window, cx, ) @@ -283,7 +312,7 @@ impl WorktreeListDelegate { } else if let Some(connection_options) = connection_options { open_remote_worktree( connection_options, - vec![final_path], + vec![new_worktree_path], app_state, window_handle, replace_current_window, @@ -421,6 +450,7 @@ async fn open_remote_worktree( app_state.user_store.clone(), app_state.languages.clone(), app_state.fs.clone(), + true, cx, ) })?; @@ -665,7 +695,7 @@ impl PickerDelegate for WorktreeListDelegate { }; Some( - ListItem::new(SharedString::from(format!("worktree-menu-{ix}"))) + ListItem::new(format!("worktree-menu-{ix}")) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 286faa0b8d0185bbdb9b488fd8502cb7566dc388..042d9a46b6c76a461e60d9002a2362190e253cd4 100644 --- 
a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -1,4 +1,4 @@ -use editor::{Editor, EditorEvent, MultiBufferSnapshot}; +use editor::{Editor, EditorEvent, MBTextSummary, MultiBufferSnapshot}; use gpui::{App, Entity, FocusHandle, Focusable, Styled, Subscription, Task, WeakEntity}; use settings::{RegisterSetting, Settings}; use std::{fmt::Write, num::NonZeroU32, time::Duration}; @@ -55,7 +55,7 @@ impl UserCaretPosition { let line_start = Point::new(selection_end.row, 0); let chars_to_last_position = snapshot - .text_summary_for_range::(line_start..selection_end) + .text_summary_for_range::(line_start..selection_end) .chars as u32; (selection_end.row, chars_to_last_position) }; @@ -116,7 +116,7 @@ impl CursorPosition { for selection in editor.selections.all_adjusted(&snapshot) { let selection_summary = snapshot .buffer_snapshot() - .text_summary_for_range::( + .text_summary_for_range::( selection.start..selection.end, ); cursor_position.selected_count.characters += diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 461b0be659fc3ffb7b7bc984485dc68ece988500..7c42972a75420ae87bf3c5b9caaf041852efc009 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -268,7 +268,7 @@ impl GoToLine { cx, |s| s.select_anchor_ranges([start..start]), ); - editor.focus_handle(cx).focus(window); + editor.focus_handle(cx).focus(window, cx); cx.notify() }); self.prev_scroll_position.take(); diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index 9b7e5ec8d1c42fc846d131cfd063de5bba8287ae..b6bba48c4b04608b502932787cfcdcd429276b5b 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -229,6 +229,10 @@ pub struct GenerativeContentBlob { #[serde(rename_all = "camelCase")] pub struct FunctionCallPart { pub function_call: FunctionCall, + /// Thought signature returned by the model for function calls. + /// Only present on the first function call in parallel call scenarios. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub thought_signature: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -480,30 +484,19 @@ impl<'de> Deserialize<'de> for ModelName { #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Eq, strum::EnumIter)] pub enum Model { - #[serde(rename = "gemini-1.5-pro")] - Gemini15Pro, - #[serde(rename = "gemini-1.5-flash-8b")] - Gemini15Flash8b, - #[serde(rename = "gemini-1.5-flash")] - Gemini15Flash, #[serde( - rename = "gemini-2.0-flash-lite", + rename = "gemini-2.5-flash-lite", + alias = "gemini-2.5-flash-lite-preview-06-17", alias = "gemini-2.0-flash-lite-preview" )] - Gemini20FlashLite, - #[serde(rename = "gemini-2.0-flash")] - Gemini20Flash, - #[serde( - rename = "gemini-2.5-flash-lite-preview", - alias = "gemini-2.5-flash-lite-preview-06-17" - )] - Gemini25FlashLitePreview, + Gemini25FlashLite, #[serde( rename = "gemini-2.5-flash", alias = "gemini-2.0-flash-thinking-exp", alias = "gemini-2.5-flash-preview-04-17", alias = "gemini-2.5-flash-preview-05-20", - alias = "gemini-2.5-flash-preview-latest" + alias = "gemini-2.5-flash-preview-latest", + alias = "gemini-2.0-flash" )] #[default] Gemini25Flash, @@ -517,6 +510,10 @@ pub enum Model { alias = "gemini-2.5-pro-preview-06-05" )] Gemini25Pro, + #[serde(rename = "gemini-3-pro-preview")] + Gemini3Pro, + #[serde(rename = "gemini-3-flash-preview")] + Gemini3Flash, #[serde(rename = "custom")] Custom { name: String, @@ -530,46 +527,37 @@ pub enum Model { impl Model { pub fn default_fast() -> Self { - Self::Gemini20FlashLite + Self::Gemini25FlashLite } pub fn id(&self) -> &str { match self { - Self::Gemini15Pro => "gemini-1.5-pro", - Self::Gemini15Flash8b => "gemini-1.5-flash-8b", - Self::Gemini15Flash => "gemini-1.5-flash", - Self::Gemini20FlashLite => "gemini-2.0-flash-lite", - Self::Gemini20Flash => "gemini-2.0-flash", - Self::Gemini25FlashLitePreview => "gemini-2.5-flash-lite-preview", + Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", + Self::Gemini3Pro => "gemini-3-pro-preview", + Self::Gemini3Flash => "gemini-3-flash-preview", Self::Custom { name, .. } => name, } } pub fn request_id(&self) -> &str { match self { - Self::Gemini15Pro => "gemini-1.5-pro", - Self::Gemini15Flash8b => "gemini-1.5-flash-8b", - Self::Gemini15Flash => "gemini-1.5-flash", - Self::Gemini20FlashLite => "gemini-2.0-flash-lite", - Self::Gemini20Flash => "gemini-2.0-flash", - Self::Gemini25FlashLitePreview => "gemini-2.5-flash-lite-preview-06-17", + Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", + Self::Gemini3Pro => "gemini-3-pro-preview", + Self::Gemini3Flash => "gemini-3-flash-preview", Self::Custom { name, .. } => name, } } pub fn display_name(&self) -> &str { match self { - Self::Gemini15Pro => "Gemini 1.5 Pro", - Self::Gemini15Flash8b => "Gemini 1.5 Flash-8b", - Self::Gemini15Flash => "Gemini 1.5 Flash", - Self::Gemini20FlashLite => "Gemini 2.0 Flash-Lite", - Self::Gemini20Flash => "Gemini 2.0 Flash", - Self::Gemini25FlashLitePreview => "Gemini 2.5 Flash-Lite Preview", + Self::Gemini25FlashLite => "Gemini 2.5 Flash-Lite", Self::Gemini25Flash => "Gemini 2.5 Flash", Self::Gemini25Pro => "Gemini 2.5 Pro", + Self::Gemini3Pro => "Gemini 3 Pro", + Self::Gemini3Flash => "Gemini 3 Flash", Self::Custom { name, display_name, .. 
} => display_name.as_ref().unwrap_or(name), @@ -578,28 +566,22 @@ impl Model { pub fn max_token_count(&self) -> u64 { match self { - Self::Gemini15Pro => 2_097_152, - Self::Gemini15Flash8b => 1_048_576, - Self::Gemini15Flash => 1_048_576, - Self::Gemini20FlashLite => 1_048_576, - Self::Gemini20Flash => 1_048_576, - Self::Gemini25FlashLitePreview => 1_000_000, + Self::Gemini25FlashLite => 1_048_576, Self::Gemini25Flash => 1_048_576, Self::Gemini25Pro => 1_048_576, + Self::Gemini3Pro => 1_048_576, + Self::Gemini3Flash => 1_048_576, Self::Custom { max_tokens, .. } => *max_tokens, } } pub fn max_output_tokens(&self) -> Option { match self { - Model::Gemini15Pro => Some(8_192), - Model::Gemini15Flash8b => Some(8_192), - Model::Gemini15Flash => Some(8_192), - Model::Gemini20FlashLite => Some(8_192), - Model::Gemini20Flash => Some(8_192), - Model::Gemini25FlashLitePreview => Some(64_000), + Model::Gemini25FlashLite => Some(65_536), Model::Gemini25Flash => Some(65_536), Model::Gemini25Pro => Some(65_536), + Model::Gemini3Pro => Some(65_536), + Model::Gemini3Flash => Some(65_536), Model::Custom { .. } => None, } } @@ -614,18 +596,17 @@ impl Model { pub fn mode(&self) -> GoogleModelMode { match self { - Self::Gemini15Pro - | Self::Gemini15Flash8b - | Self::Gemini15Flash - | Self::Gemini20FlashLite - | Self::Gemini20Flash => GoogleModelMode::Default, - Self::Gemini25FlashLitePreview | Self::Gemini25Flash | Self::Gemini25Pro => { + Self::Gemini25FlashLite + | Self::Gemini25Flash + | Self::Gemini25Pro + | Self::Gemini3Pro => { GoogleModelMode::Thinking { // By default these models are set to "auto", so we preserve that behavior // but indicate they are capable of thinking mode budget_tokens: None, } } + Self::Gemini3Flash => GoogleModelMode::Default, Self::Custom { mode, .. 
} => *mode, } } @@ -636,3 +617,109 @@ impl std::fmt::Display for Model { write!(f, "{}", self.id()) } } + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn test_function_call_part_with_signature_serializes_correctly() { + let part = FunctionCallPart { + function_call: FunctionCall { + name: "test_function".to_string(), + args: json!({"arg": "value"}), + }, + thought_signature: Some("test_signature".to_string()), + }; + + let serialized = serde_json::to_value(&part).unwrap(); + + assert_eq!(serialized["functionCall"]["name"], "test_function"); + assert_eq!(serialized["functionCall"]["args"]["arg"], "value"); + assert_eq!(serialized["thoughtSignature"], "test_signature"); + } + + #[test] + fn test_function_call_part_without_signature_omits_field() { + let part = FunctionCallPart { + function_call: FunctionCall { + name: "test_function".to_string(), + args: json!({"arg": "value"}), + }, + thought_signature: None, + }; + + let serialized = serde_json::to_value(&part).unwrap(); + + assert_eq!(serialized["functionCall"]["name"], "test_function"); + assert_eq!(serialized["functionCall"]["args"]["arg"], "value"); + // thoughtSignature field should not be present when None + assert!(serialized.get("thoughtSignature").is_none()); + } + + #[test] + fn test_function_call_part_deserializes_with_signature() { + let json = json!({ + "functionCall": { + "name": "test_function", + "args": {"arg": "value"} + }, + "thoughtSignature": "test_signature" + }); + + let part: FunctionCallPart = serde_json::from_value(json).unwrap(); + + assert_eq!(part.function_call.name, "test_function"); + assert_eq!(part.thought_signature, Some("test_signature".to_string())); + } + + #[test] + fn test_function_call_part_deserializes_without_signature() { + let json = json!({ + "functionCall": { + "name": "test_function", + "args": {"arg": "value"} + } + }); + + let part: FunctionCallPart = serde_json::from_value(json).unwrap(); + + assert_eq!(part.function_call.name, "test_function"); + assert_eq!(part.thought_signature, None); + } + + #[test] + fn test_function_call_part_round_trip() { + let original = FunctionCallPart { + function_call: FunctionCall { + name: "test_function".to_string(), + args: json!({"arg": "value", "nested": {"key": "val"}}), + }, + thought_signature: Some("round_trip_signature".to_string()), + }; + + let serialized = serde_json::to_value(&original).unwrap(); + let deserialized: FunctionCallPart = serde_json::from_value(serialized).unwrap(); + + assert_eq!(deserialized.function_call.name, original.function_call.name); + assert_eq!(deserialized.function_call.args, original.function_call.args); + assert_eq!(deserialized.thought_signature, original.thought_signature); + } + + #[test] + fn test_function_call_part_with_empty_signature_serializes() { + let part = FunctionCallPart { + function_call: FunctionCall { + name: "test_function".to_string(), + args: json!({"arg": "value"}), + }, + thought_signature: Some("".to_string()), + }; + + let serialized = serde_json::to_value(&part).unwrap(); + + // Empty string should still be serialized (normalization happens at a higher level) + assert_eq!(serialized["thoughtSignature"], ""); + } +} diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 6523bbe526848c15053a4bad45dce208a5ecd7e0..da7e660a0171f38b8dd61de1c9323773ded2589b 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -21,7 +21,6 @@ default = ["font-kit", "wayland", "x11", "windows-manifest"] test-support = [ "leak-detection", 
"collections/test-support", - "rand", "util/test-support", "http_client/test-support", "wayland", @@ -109,7 +108,7 @@ parking = "2.0.0" parking_lot.workspace = true postage.workspace = true profiling.workspace = true -rand = { optional = true, workspace = true } +rand.workspace = true raw-window-handle = "0.6" refineable.workspace = true resvg = { version = "0.45.0", default-features = false, features = [ @@ -121,7 +120,7 @@ usvg = { version = "0.45.0", default-features = false } util_macros.workspace = true schemars.workspace = true seahash = "4.1" -semantic_version.workspace = true +semver.workspace = true serde.workspace = true serde_json.workspace = true slotmap.workspace = true @@ -138,6 +137,8 @@ waker-fn = "1.2.0" lyon = "1.0" libc.workspace = true pin-project = "1.1.10" +circular-buffer.workspace = true +spin = "0.10.0" [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" @@ -156,8 +157,10 @@ media.workspace = true objc.workspace = true objc2 = { version = "0.6", optional = true } objc2-metal = { version = "0.3", optional = true } +mach2.workspace = true #TODO: replace with "objc2" metal.workspace = true +flume = "0.11" [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "macos"))'.dependencies] pathfinder_geometry = "0.5" @@ -185,12 +188,12 @@ font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "11052312 "source-fontconfig-dlopen", ], optional = true } -calloop = { version = "0.13.0" } +calloop = { version = "0.14.3" } filedescriptor = { version = "0.8.2", optional = true } open = { version = "5.2.0", optional = true } # Wayland -calloop-wayland-source = { version = "0.3.0", optional = true } +calloop-wayland-source = { version = "0.4.1", optional = true } wayland-backend = { version = "0.3.3", features = [ "client_system", "dlopen", @@ -327,3 +330,7 @@ path = "examples/window_shadow.rs" [[example]] name = "grid_layout" path = "examples/grid_layout.rs" + +[[example]] +name = "mouse_pressure" +path = "examples/mouse_pressure.rs" diff --git a/crates/gpui/README.md b/crates/gpui/README.md index 2c411f76cd4782904f5e704c446a6f0e76f7d9ab..ad3fd37fc55857699f5fd23cbe4f4f088ee687c8 100644 --- a/crates/gpui/README.md +++ b/crates/gpui/README.md @@ -11,7 +11,7 @@ GPUI is still in active development as we work on the Zed code editor, and is st gpui = { version = "*" } ``` - - [Ownership and data flow](src/_ownership_and_data_flow.rs) + - [Ownership and data flow](_ownership_and_data_flow) Everything in GPUI starts with an `Application`. You can create one with `Application::new()`, and kick off your application by passing a callback to `Application::run()`. Inside this callback, you can create a new window with `App::open_window()`, and register your first root view. See [gpui.rs](https://www.gpui.rs/) for a complete example. @@ -63,4 +63,4 @@ In addition to the systems above, GPUI provides a range of smaller services that - The `[gpui::test]` macro provides a convenient way to write tests for your GPUI applications. Tests also have their own kind of context, a `TestAppContext` which provides ways of simulating common platform input. See `app::test_context` and `test` modules for more details. -Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples, and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog). 
+Currently, the best way to learn about these APIs is to read the Zed source code or drop a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples, and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog). diff --git a/crates/gpui/build.rs b/crates/gpui/build.rs index ec35ec0bc63113582a945c71198cd7bc14301dcc..c7ae7ac9f239f2f6ce3880f9329f2ba92b2174f3 100644 --- a/crates/gpui/build.rs +++ b/crates/gpui/build.rs @@ -84,6 +84,8 @@ mod macos { .allowlist_var("_dispatch_main_q") .allowlist_var("_dispatch_source_type_data_add") .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH") + .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT") + .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW") .allowlist_var("DISPATCH_TIME_NOW") .allowlist_function("dispatch_get_global_queue") .allowlist_function("dispatch_async_f") diff --git a/crates/gpui/examples/data_table.rs b/crates/gpui/examples/data_table.rs index 56c9625ed3039b872cf4fcc70e84719ce903e268..dd1a443a9dfaa28a5079a034b8214ce1bbf01da8 100644 --- a/crates/gpui/examples/data_table.rs +++ b/crates/gpui/examples/data_table.rs @@ -438,7 +438,7 @@ impl Render for DataTable { }), ) .size_full() - .track_scroll(self.scroll_handle.clone()), + .track_scroll(&self.scroll_handle), ) .child(self.render_scrollbar(window, cx)), ), diff --git a/crates/gpui/examples/focus_visible.rs b/crates/gpui/examples/focus_visible.rs index 737317cabadb7d3358c9c0497b52d4c2ff2e1028..d7c15396f0381ef29b3d6600347fd90a602256f5 100644 --- a/crates/gpui/examples/focus_visible.rs +++ b/crates/gpui/examples/focus_visible.rs @@ -29,7 +29,7 @@ impl Example { ]; let focus_handle = cx.focus_handle(); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); Self { focus_handle, @@ -40,13 +40,13 @@ impl Example { } } - fn on_tab(&mut self, _: &Tab, window: &mut Window, _: &mut Context) { - window.focus_next(); + fn on_tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context) { + window.focus_next(cx); self.message = SharedString::from("Pressed Tab - focus-visible border should appear!"); } - fn on_tab_prev(&mut self, _: &TabPrev, window: &mut Window, _: &mut Context) { - window.focus_prev(); + fn on_tab_prev(&mut self, _: &TabPrev, window: &mut Window, cx: &mut Context) { + window.focus_prev(cx); self.message = SharedString::from("Pressed Shift-Tab - focus-visible border should appear!"); } diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 16af30166c6ccdbd06469f4e2fd4cd3df8352127..44fae4ffe6bb9e120a8f96c10e0af8f4f8026cdd 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -178,7 +178,7 @@ impl TextInput { if position.y > bounds.bottom() { return self.content.len(); } - line.index_for_x(position.x - bounds.left()) + line.closest_index_for_x(position.x - bounds.left()) } fn select_to(&mut self, offset: usize, cx: &mut Context) { @@ -380,7 +380,7 @@ impl EntityInputHandler for TextInput { let last_layout = self.last_layout.as_ref()?; assert_eq!(last_layout.text, self.content); - let utf8_index = last_layout.index_for_x(point.x - line_point.x); + let utf8_index = last_layout.index_for_x(point.x - line_point.x)?; Some(self.offset_to_utf16(utf8_index)) } } @@ -736,7 +736,7 @@ fn main() { window .update(cx, |view, window, cx| { - window.focus(&view.text_input.focus_handle(cx)); + window.focus(&view.text_input.focus_handle(cx), cx); cx.activate(true); }) .unwrap(); diff --git a/crates/gpui/examples/mouse_pressure.rs 
b/crates/gpui/examples/mouse_pressure.rs new file mode 100644 index 0000000000000000000000000000000000000000..12790f988eedac3009ae619cadbc6f40c4af2e4b --- /dev/null +++ b/crates/gpui/examples/mouse_pressure.rs @@ -0,0 +1,66 @@ +use gpui::{ + App, Application, Bounds, Context, MousePressureEvent, PressureStage, Window, WindowBounds, + WindowOptions, div, prelude::*, px, rgb, size, +}; + +struct MousePressureExample { + pressure_stage: PressureStage, + pressure_amount: f32, +} + +impl Render for MousePressureExample { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .flex() + .flex_col() + .gap_3() + .bg(rgb(0x505050)) + .size(px(500.0)) + .justify_center() + .items_center() + .shadow_lg() + .border_1() + .border_color(rgb(0x0000ff)) + .text_xl() + .text_color(rgb(0xffffff)) + .child(format!("Pressure stage: {:?}", &self.pressure_stage)) + .child(format!("Pressure amount: {:.2}", &self.pressure_amount)) + .on_mouse_pressure(cx.listener(Self::on_mouse_pressure)) + } +} + +impl MousePressureExample { + fn on_mouse_pressure( + &mut self, + pressure_event: &MousePressureEvent, + _window: &mut Window, + cx: &mut Context, + ) { + self.pressure_amount = pressure_event.pressure; + self.pressure_stage = pressure_event.stage; + + cx.notify(); + } +} + +fn main() { + Application::new().run(|cx: &mut App| { + let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); + + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |_, cx| { + cx.new(|_| MousePressureExample { + pressure_stage: PressureStage::Zero, + pressure_amount: 0.0, + }) + }, + ) + .unwrap(); + + cx.activate(true); + }); +} diff --git a/crates/gpui/examples/on_window_close_quit.rs b/crates/gpui/examples/on_window_close_quit.rs index 8fe24001445d94b1629bf766294d850d0918a5e8..9a2b2f2fee43f753aece55d076be647ad8060965 100644 --- a/crates/gpui/examples/on_window_close_quit.rs +++ b/crates/gpui/examples/on_window_close_quit.rs @@ -55,7 +55,7 @@ fn main() { cx.activate(false); cx.new(|cx| { let focus_handle = cx.focus_handle(); - focus_handle.focus(window); + focus_handle.focus(window, cx); ExampleWindow { focus_handle } }) }, @@ -72,7 +72,7 @@ fn main() { |window, cx| { cx.new(|cx| { let focus_handle = cx.focus_handle(); - focus_handle.focus(window); + focus_handle.focus(window, cx); ExampleWindow { focus_handle } }) }, diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index e7055cbdbbd781523edbc851d143bf56a551728f..9f15d12f469fa6ec5c7be52d30a63b30163ff254 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -1,7 +1,7 @@ use gpui::{ Application, Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder, - PathStyle, Pixels, Point, Render, SharedString, StrokeOptions, Window, WindowOptions, canvas, - div, linear_color_stop, linear_gradient, point, prelude::*, px, quad, rgb, size, + PathStyle, Pixels, Point, Render, StrokeOptions, Window, WindowOptions, canvas, div, + linear_color_stop, linear_gradient, point, prelude::*, px, quad, rgb, size, }; struct PaintingViewer { @@ -309,7 +309,7 @@ fn button( on_click: impl Fn(&mut PaintingViewer, &mut Context) + 'static, ) -> impl IntoElement { div() - .id(SharedString::from(text.to_string())) + .id(text.to_string()) .child(text.to_string()) .bg(gpui::black()) .text_color(gpui::white()) diff --git a/crates/gpui/examples/tab_stop.rs b/crates/gpui/examples/tab_stop.rs index 
8dbcbeccb7351fda18e8d36fe38d8f26c4a70cc9..4d99da1a07a123e9a18b3c64a90834c31bd76909 100644 --- a/crates/gpui/examples/tab_stop.rs +++ b/crates/gpui/examples/tab_stop.rs @@ -22,7 +22,7 @@ impl Example { ]; let focus_handle = cx.focus_handle(); - window.focus(&focus_handle); + window.focus(&focus_handle, cx); Self { focus_handle, @@ -31,13 +31,13 @@ impl Example { } } - fn on_tab(&mut self, _: &Tab, window: &mut Window, _: &mut Context) { - window.focus_next(); + fn on_tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context) { + window.focus_next(cx); self.message = SharedString::from("You have pressed `Tab`."); } - fn on_tab_prev(&mut self, _: &TabPrev, window: &mut Window, _: &mut Context) { - window.focus_prev(); + fn on_tab_prev(&mut self, _: &TabPrev, window: &mut Window, cx: &mut Context) { + window.focus_prev(cx); self.message = SharedString::from("You have pressed `Shift-Tab`."); } } diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs index 4445f24e4ec0f2809109964fd34610cad1299e90..06003c4663ee5711283a85684c25b9f5d8c5b743 100644 --- a/crates/gpui/examples/window.rs +++ b/crates/gpui/examples/window.rs @@ -1,6 +1,6 @@ use gpui::{ - App, Application, Bounds, Context, KeyBinding, PromptButton, PromptLevel, SharedString, Timer, - Window, WindowBounds, WindowKind, WindowOptions, actions, div, prelude::*, px, rgb, size, + App, Application, Bounds, Context, KeyBinding, PromptButton, PromptLevel, Timer, Window, + WindowBounds, WindowKind, WindowOptions, actions, div, prelude::*, px, rgb, size, }; struct SubWindow { @@ -9,7 +9,7 @@ struct SubWindow { fn button(text: &str, on_click: impl Fn(&mut Window, &mut App) + 'static) -> impl IntoElement { div() - .id(SharedString::from(text.to_string())) + .id(text.to_string()) .flex_none() .px_2() .bg(rgb(0xf7f7f7)) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 864968b9e7a9ad862d9b67a19cc8897524dffb9e..7bd0daf56a466666b8cf5ae70f6b7cb5597a0d10 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -38,10 +38,11 @@ use crate::{ AssetSource, BackgroundExecutor, Bounds, ClipboardItem, CursorStyle, DispatchPhase, DisplayId, EventEmitter, FocusHandle, FocusMap, ForegroundExecutor, Global, KeyBinding, KeyContext, Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, - PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, Point, PromptBuilder, - PromptButton, PromptHandle, PromptLevel, Render, RenderImage, RenderablePromptHandle, - Reservation, ScreenCaptureSource, SharedString, SubscriberSet, Subscription, SvgRenderer, Task, - TextSystem, Window, WindowAppearance, WindowHandle, WindowId, WindowInvalidator, + PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, Point, Priority, + PromptBuilder, PromptButton, PromptHandle, PromptLevel, Render, RenderImage, + RenderablePromptHandle, Reservation, ScreenCaptureSource, SharedString, SubscriberSet, + Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance, WindowHandle, WindowId, + WindowInvalidator, colors::{Colors, GlobalColors}, current_platform, hash, init_app_menus, }; @@ -551,12 +552,39 @@ impl SystemWindowTabController { } } +pub(crate) enum GpuiMode { + #[cfg(any(test, feature = "test-support"))] + Test { + skip_drawing: bool, + }, + Production, +} + +impl GpuiMode { + #[cfg(any(test, feature = "test-support"))] + pub fn test() -> Self { + GpuiMode::Test { + skip_drawing: false, + } + } + + #[inline] + pub(crate) fn skip_drawing(&self) -> bool { + match self { + 
#[cfg(any(test, feature = "test-support"))] + GpuiMode::Test { skip_drawing } => *skip_drawing, + GpuiMode::Production => false, + } + } +} + /// Contains the state of the full application, and passed as a reference to a variety of callbacks. /// Other [Context] derefs to this type. /// You need a reference to an `App` to access the state of a [Entity]. pub struct App { pub(crate) this: Weak, pub(crate) platform: Rc, + pub(crate) mode: GpuiMode, text_system: Arc, flushing_effects: bool, pending_updates: usize, @@ -635,6 +663,7 @@ impl App { this: this.clone(), platform: platform.clone(), text_system, + mode: GpuiMode::Production, actions: Rc::new(ActionRegistry::default()), flushing_effects: false, pending_updates: 0, @@ -1410,7 +1439,7 @@ impl App { let quit_on_empty = match cx.quit_mode { QuitMode::Explicit => false, QuitMode::LastWindowClosed => true, - QuitMode::Default => !cfg!(macos), + QuitMode::Default => cfg!(not(target_os = "macos")), }; if quit_on_empty && cx.windows.is_empty() { @@ -1466,6 +1495,24 @@ impl App { .spawn(async move { f(&mut cx).await }) } + /// Spawns the future returned by the given function on the main thread with + /// the given priority. The closure will be invoked with [AsyncApp], which + /// allows the application state to be accessed across await points. + pub fn spawn_with_priority(&self, priority: Priority, f: AsyncFn) -> Task + where + AsyncFn: AsyncFnOnce(&mut AsyncApp) -> R + 'static, + R: 'static, + { + if self.quitting { + debug_panic!("Can't spawn on main thread after on_app_quit") + }; + + let mut cx = self.to_async(); + + self.foreground_executor + .spawn_with_priority(priority, async move { f(&mut cx).await }) + } + /// Schedules the given function to be run at the end of the current effect cycle, allowing entities /// that are currently on the stack to be returned to the app. pub fn defer(&mut self, f: impl FnOnce(&mut App) + 'static) { @@ -1730,7 +1777,10 @@ impl App { /// Register a global handler for actions invoked via the keyboard. These handlers are run at /// the end of the bubble phase for actions, and so will only be invoked if there are no other /// handlers or if they called `cx.propagate()`. - pub fn on_action(&mut self, listener: impl Fn(&A, &mut Self) + 'static) { + pub fn on_action( + &mut self, + listener: impl Fn(&A, &mut Self) + 'static, + ) -> &mut Self { self.global_action_listeners .entry(TypeId::of::()) .or_default() @@ -1740,6 +1790,7 @@ impl App { listener(action, cx) } })); + self } /// Event handlers propagate events by default. Call this method to stop dispatching to @@ -1849,8 +1900,11 @@ impl App { pub(crate) fn clear_pending_keystrokes(&mut self) { for window in self.windows() { window - .update(self, |_, window, _| { - window.clear_pending_keystrokes(); + .update(self, |_, window, cx| { + if window.pending_input_keystrokes().is_some() { + window.clear_pending_keystrokes(); + window.pending_input_changed(cx); + } }) .ok(); } @@ -2400,10 +2454,6 @@ impl HttpClient for NullHttpClient { fn proxy(&self) -> Option<&Url> { None } - - fn type_name(&self) -> &'static str { - type_name::() - } } /// A mutable reference to an entity owned by GPUI diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index 381541d4b11377b988dd30e03155855c7ba25aed..805dfced162cd27f0cc785a8282ae3b802c2873a 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -296,8 +296,8 @@ impl AsyncWindowContext { /// A convenience method for [`Window::on_next_frame`]. 
pub fn on_next_frame(&mut self, f: impl FnOnce(&mut Window, &mut App) + 'static) { - self.window - .update(self, |_, window, _| window.on_next_frame(f)) + self.app + .update_window(self.window, |_, window, _| window.on_next_frame(f)) .ok(); } @@ -306,8 +306,8 @@ impl AsyncWindowContext { &mut self, read: impl FnOnce(&G, &Window, &App) -> R, ) -> Result { - self.window - .update(self, |_, window, cx| read(cx.global(), window, cx)) + self.app + .update_window(self.window, |_, window, cx| read(cx.global(), window, cx)) } /// A convenience method for [`App::update_global`](BorrowAppContext::update_global). @@ -319,7 +319,7 @@ impl AsyncWindowContext { where G: Global, { - self.window.update(self, |_, window, cx| { + self.app.update_window(self.window, |_, window, cx| { cx.update_global(|global, cx| update(global, window, cx)) }) } @@ -350,8 +350,8 @@ impl AsyncWindowContext { where T: Clone + Into, { - self.window - .update(self, |_, window, cx| { + self.app + .update_window(self.window, |_, window, cx| { window.prompt(level, message, detail, answers, cx) }) .unwrap_or_else(|_| oneshot::channel().1) @@ -365,11 +365,13 @@ impl AppContext for AsyncWindowContext { where T: 'static, { - self.window.update(self, |_, _, cx| cx.new(build_entity)) + self.app + .update_window(self.window, |_, _, cx| cx.new(build_entity)) } fn reserve_entity(&mut self) -> Result> { - self.window.update(self, |_, _, cx| cx.reserve_entity()) + self.app + .update_window(self.window, |_, _, cx| cx.reserve_entity()) } fn insert_entity( @@ -377,8 +379,9 @@ impl AppContext for AsyncWindowContext { reservation: Reservation, build_entity: impl FnOnce(&mut Context) -> T, ) -> Self::Result> { - self.window - .update(self, |_, _, cx| cx.insert_entity(reservation, build_entity)) + self.app.update_window(self.window, |_, _, cx| { + cx.insert_entity(reservation, build_entity) + }) } fn update_entity( @@ -386,8 +389,8 @@ impl AppContext for AsyncWindowContext { handle: &Entity, update: impl FnOnce(&mut T, &mut Context) -> R, ) -> Result { - self.window - .update(self, |_, _, cx| cx.update_entity(handle, update)) + self.app + .update_window(self.window, |_, _, cx| cx.update_entity(handle, update)) } fn as_mut<'a, T>(&'a mut self, _: &Entity) -> Self::Result> @@ -452,8 +455,9 @@ impl VisualContext for AsyncWindowContext { &mut self, build_entity: impl FnOnce(&mut Window, &mut Context) -> T, ) -> Self::Result> { - self.window - .update(self, |_, window, cx| cx.new(|cx| build_entity(window, cx))) + self.app.update_window(self.window, |_, window, cx| { + cx.new(|cx| build_entity(window, cx)) + }) } fn update_window_entity( @@ -461,7 +465,7 @@ impl VisualContext for AsyncWindowContext { view: &Entity, update: impl FnOnce(&mut T, &mut Window, &mut Context) -> R, ) -> Self::Result { - self.window.update(self, |_, window, cx| { + self.app.update_window(self.window, |_, window, cx| { view.update(cx, |entity, cx| update(entity, window, cx)) }) } @@ -473,16 +477,17 @@ impl VisualContext for AsyncWindowContext { where V: 'static + Render, { - self.window - .update(self, |_, window, cx| window.replace_root(cx, build_view)) + self.app.update_window(self.window, |_, window, cx| { + window.replace_root(cx, build_view) + }) } fn focus(&mut self, view: &Entity) -> Self::Result<()> where V: Focusable, { - self.window.update(self, |_, window, cx| { - view.read(cx).focus_handle(cx).focus(window); + self.app.update_window(self.window, |_, window, cx| { + view.read(cx).focus_handle(cx).focus(window, cx); }) } } diff --git a/crates/gpui/src/app/context.rs 
b/crates/gpui/src/app/context.rs index 41d6cac82b7c179040d61ddfd22b003c143a5fb9..b780ca426c15c99030f24ee48bde978ad38526e7 100644 --- a/crates/gpui/src/app/context.rs +++ b/crates/gpui/src/app/context.rs @@ -1,7 +1,7 @@ use crate::{ AnyView, AnyWindowHandle, AppContext, AsyncApp, DispatchPhase, Effect, EntityId, EventEmitter, - FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, Reservation, SubscriberSet, - Subscription, Task, WeakEntity, WeakFocusHandle, Window, WindowHandle, + FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, Priority, Reservation, + SubscriberSet, Subscription, Task, WeakEntity, WeakFocusHandle, Window, WindowHandle, }; use anyhow::Result; use futures::FutureExt; @@ -285,7 +285,7 @@ impl<'a, T: 'static> Context<'a, T> { /// Focus the given view in the given window. View type is required to implement Focusable. pub fn focus_view(&mut self, view: &Entity, window: &mut Window) { - window.focus(&view.focus_handle(self)); + window.focus(&view.focus_handle(self), self); } /// Sets a given callback to be run on the next frame. @@ -667,6 +667,25 @@ impl<'a, T: 'static> Context<'a, T> { window.spawn(self, async move |cx| f(view, cx).await) } + /// Schedule a future to be run asynchronously with the given priority. + /// The given callback is invoked with a [`WeakEntity`] to avoid leaking the entity for a long-running process. + /// It's also given an [`AsyncWindowContext`], which can be used to access the state of the entity across await points. + /// The returned future will be polled on the main thread. + #[track_caller] + pub fn spawn_in_with_priority( + &self, + priority: Priority, + window: &Window, + f: AsyncFn, + ) -> Task + where + R: 'static, + AsyncFn: AsyncFnOnce(WeakEntity, &mut AsyncWindowContext) -> R + 'static, + { + let view = self.weak_entity(); + window.spawn_with_priority(priority, self, async move |cx| f(view, cx).await) + } + /// Register a callback to be invoked when the given global state changes. 
pub fn observe_global_in( &mut self, @@ -713,7 +732,7 @@ impl<'a, T: 'static> Context<'a, T> { { let view = self.entity(); window.defer(self, move |window, cx| { - view.read(cx).focus_handle(cx).focus(window) + view.read(cx).focus_handle(cx).focus(window, cx) }) } } @@ -736,14 +755,17 @@ impl Context<'_, T> { impl AppContext for Context<'_, T> { type Result = U; + #[inline] fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> U) -> Entity { self.app.new(build_entity) } + #[inline] fn reserve_entity(&mut self) -> Reservation { self.app.reserve_entity() } + #[inline] fn insert_entity( &mut self, reservation: Reservation, @@ -752,6 +774,7 @@ impl AppContext for Context<'_, T> { self.app.insert_entity(reservation, build_entity) } + #[inline] fn update_entity( &mut self, handle: &Entity, @@ -760,6 +783,7 @@ impl AppContext for Context<'_, T> { self.app.update_entity(handle, update) } + #[inline] fn as_mut<'a, E>(&'a mut self, handle: &Entity) -> Self::Result> where E: 'static, @@ -767,6 +791,7 @@ impl AppContext for Context<'_, T> { self.app.as_mut(handle) } + #[inline] fn read_entity( &self, handle: &Entity, @@ -778,6 +803,7 @@ impl AppContext for Context<'_, T> { self.app.read_entity(handle, read) } + #[inline] fn update_window(&mut self, window: AnyWindowHandle, update: F) -> Result where F: FnOnce(AnyView, &mut Window, &mut App) -> R, @@ -785,6 +811,7 @@ impl AppContext for Context<'_, T> { self.app.update_window(window, update) } + #[inline] fn read_window( &self, window: &WindowHandle, @@ -796,6 +823,7 @@ impl AppContext for Context<'_, T> { self.app.read_window(window, read) } + #[inline] fn background_spawn(&self, future: impl Future + Send + 'static) -> Task where R: Send + 'static, @@ -803,6 +831,7 @@ impl AppContext for Context<'_, T> { self.app.background_executor.spawn(future) } + #[inline] fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> Self::Result where G: Global, diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index bea98cb06a5f80fc8141a52bc47f48e8734b40c9..8c1bdfa1cee509dcbc061200cb651ce5d3bf4fcd 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -244,11 +244,13 @@ impl AnyEntity { } /// Returns the id associated with this entity. + #[inline] pub fn entity_id(&self) -> EntityId { self.entity_id } /// Returns the [TypeId] associated with this entity. 
+ #[inline] pub fn entity_type(&self) -> TypeId { self.entity_type } @@ -332,18 +334,21 @@ impl Drop for AnyEntity { } impl From> for AnyEntity { + #[inline] fn from(entity: Entity) -> Self { entity.any_entity } } impl Hash for AnyEntity { + #[inline] fn hash(&self, state: &mut H) { self.entity_id.hash(state); } } impl PartialEq for AnyEntity { + #[inline] fn eq(&self, other: &Self) -> bool { self.entity_id == other.entity_id } @@ -352,12 +357,14 @@ impl PartialEq for AnyEntity { impl Eq for AnyEntity {} impl Ord for AnyEntity { + #[inline] fn cmp(&self, other: &Self) -> Ordering { self.entity_id.cmp(&other.entity_id) } } impl PartialOrd for AnyEntity { + #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } @@ -384,6 +391,7 @@ pub struct Entity { impl Sealed for Entity {} impl Entity { + #[inline] fn new(id: EntityId, entity_map: Weak>) -> Self where T: 'static, @@ -395,11 +403,13 @@ impl Entity { } /// Get the entity ID associated with this entity + #[inline] pub fn entity_id(&self) -> EntityId { self.any_entity.entity_id } /// Downgrade this entity pointer to a non-retaining weak pointer + #[inline] pub fn downgrade(&self) -> WeakEntity { WeakEntity { any_entity: self.any_entity.downgrade(), @@ -408,16 +418,19 @@ impl Entity { } /// Convert this into a dynamically typed entity. + #[inline] pub fn into_any(self) -> AnyEntity { self.any_entity } /// Grab a reference to this entity from the context. + #[inline] pub fn read<'a>(&self, cx: &'a App) -> &'a T { cx.entities.read(self) } /// Read the entity referenced by this handle with the given function. + #[inline] pub fn read_with( &self, cx: &C, @@ -427,6 +440,7 @@ impl Entity { } /// Updates the entity referenced by this handle with the given function. + #[inline] pub fn update( &self, cx: &mut C, @@ -436,6 +450,7 @@ impl Entity { } /// Updates the entity referenced by this handle with the given function. + #[inline] pub fn as_mut<'a, C: AppContext>(&self, cx: &'a mut C) -> C::Result> { cx.as_mut(self) } @@ -451,6 +466,7 @@ impl Entity { /// Updates the entity referenced by this handle with the given function if /// the referenced entity still exists, within a visual context that has a window. /// Returns an error if the entity has been released. + #[inline] pub fn update_in( &self, cx: &mut C, @@ -461,6 +477,7 @@ impl Entity { } impl Clone for Entity { + #[inline] fn clone(&self) -> Self { Self { any_entity: self.any_entity.clone(), @@ -479,12 +496,14 @@ impl std::fmt::Debug for Entity { } impl Hash for Entity { + #[inline] fn hash(&self, state: &mut H) { self.any_entity.hash(state); } } impl PartialEq for Entity { + #[inline] fn eq(&self, other: &Self) -> bool { self.any_entity == other.any_entity } @@ -493,18 +512,21 @@ impl PartialEq for Entity { impl Eq for Entity {} impl PartialEq> for Entity { + #[inline] fn eq(&self, other: &WeakEntity) -> bool { self.any_entity.entity_id() == other.entity_id() } } impl Ord for Entity { + #[inline] fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.entity_id().cmp(&other.entity_id()) } } impl PartialOrd for Entity { + #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } @@ -520,6 +542,7 @@ pub struct AnyWeakEntity { impl AnyWeakEntity { /// Get the entity ID associated with this weak reference. + #[inline] pub fn entity_id(&self) -> EntityId { self.entity_id } @@ -561,7 +584,33 @@ impl AnyWeakEntity { }) } - /// Assert that entity referenced by this weak handle has been released. 
+ /// Asserts that the entity referenced by this weak handle has been fully released. + /// + /// # Example + /// + /// ```ignore + /// let entity = cx.new(|_| MyEntity::new()); + /// let weak = entity.downgrade(); + /// drop(entity); + /// + /// // Verify the entity was released + /// weak.assert_released(); + /// ``` + /// + /// # Debugging Leaks + /// + /// If this method panics due to leaked handles, set the `LEAK_BACKTRACE` environment + /// variable to see where the leaked handles were allocated: + /// + /// ```bash + /// LEAK_BACKTRACE=1 cargo test my_test + /// ``` + /// + /// # Panics + /// + /// - Panics if any strong handles to the entity are still alive. + /// - Panics if the entity was recently dropped but cleanup hasn't completed yet + /// (resources are retained until the end of the effect cycle). #[cfg(any(test, feature = "leak-detection"))] pub fn assert_released(&self) { self.entity_ref_counts @@ -618,18 +667,21 @@ impl std::fmt::Debug for AnyWeakEntity { } impl From> for AnyWeakEntity { + #[inline] fn from(entity: WeakEntity) -> Self { entity.any_entity } } impl Hash for AnyWeakEntity { + #[inline] fn hash(&self, state: &mut H) { self.entity_id.hash(state); } } impl PartialEq for AnyWeakEntity { + #[inline] fn eq(&self, other: &Self) -> bool { self.entity_id == other.entity_id } @@ -638,12 +690,14 @@ impl PartialEq for AnyWeakEntity { impl Eq for AnyWeakEntity {} impl Ord for AnyWeakEntity { + #[inline] fn cmp(&self, other: &Self) -> Ordering { self.entity_id.cmp(&other.entity_id) } } impl PartialOrd for AnyWeakEntity { + #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } @@ -740,6 +794,7 @@ impl WeakEntity { } /// Create a new weak entity that can never be upgraded. + #[inline] pub fn new_invalid() -> Self { Self { any_entity: AnyWeakEntity::new_invalid(), @@ -749,12 +804,14 @@ impl WeakEntity { } impl Hash for WeakEntity { + #[inline] fn hash(&self, state: &mut H) { self.any_entity.hash(state); } } impl PartialEq for WeakEntity { + #[inline] fn eq(&self, other: &Self) -> bool { self.any_entity == other.any_entity } @@ -763,33 +820,90 @@ impl PartialEq for WeakEntity { impl Eq for WeakEntity {} impl PartialEq> for WeakEntity { + #[inline] fn eq(&self, other: &Entity) -> bool { self.entity_id() == other.any_entity.entity_id() } } impl Ord for WeakEntity { + #[inline] fn cmp(&self, other: &Self) -> Ordering { self.entity_id().cmp(&other.entity_id()) } } impl PartialOrd for WeakEntity { + #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } +/// Controls whether backtraces are captured when entity handles are created. +/// +/// Set the `LEAK_BACKTRACE` environment variable to any non-empty value to enable +/// backtrace capture. This helps identify where leaked handles were allocated. #[cfg(any(test, feature = "leak-detection"))] static LEAK_BACKTRACE: std::sync::LazyLock = std::sync::LazyLock::new(|| std::env::var("LEAK_BACKTRACE").is_ok_and(|b| !b.is_empty())); +/// Unique identifier for a specific entity handle instance. +/// +/// This is distinct from `EntityId` - while multiple handles can point to the same +/// entity (same `EntityId`), each handle has its own unique `HandleId`. #[cfg(any(test, feature = "leak-detection"))] #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq)] pub(crate) struct HandleId { - id: u64, // id of the handle itself, not the pointed at object + id: u64, } +/// Tracks entity handle allocations to detect leaks. 
+/// +/// The leak detector is enabled in tests and when the `leak-detection` feature is active. +/// It tracks every `Entity` and `AnyEntity` handle that is created and released, +/// allowing you to verify that all handles to an entity have been properly dropped. +/// +/// # How do leaks happen? +/// +/// Entities are reference-counted structures that can own other entities +/// allowing to form cycles. If such a strong-reference counted cycle is +/// created, all participating strong entities in this cycle will effectively +/// leak as they cannot be released anymore. +/// +/// # Usage +/// +/// You can use `WeakEntity::assert_released` or `AnyWeakEntity::assert_released` +/// to verify that an entity has been fully released: +/// +/// ```ignore +/// let entity = cx.new(|_| MyEntity::new()); +/// let weak = entity.downgrade(); +/// drop(entity); +/// +/// // This will panic if any handles to the entity are still alive +/// weak.assert_released(); +/// ``` +/// +/// # Debugging Leaks +/// +/// When a leak is detected, the detector will panic with information about the leaked +/// handles. To see where the leaked handles were allocated, set the `LEAK_BACKTRACE` +/// environment variable: +/// +/// ```bash +/// LEAK_BACKTRACE=1 cargo test my_test +/// ``` +/// +/// This will capture and display backtraces for each leaked handle, helping you +/// identify where handles were created but not released. +/// +/// # How It Works +/// +/// - When an entity handle is created (via `Entity::new`, `Entity::clone`, or +/// `WeakEntity::upgrade`), `handle_created` is called to register the handle. +/// - When a handle is dropped, `handle_released` removes it from tracking. +/// - `assert_released` verifies that no handles remain for a given entity. #[cfg(any(test, feature = "leak-detection"))] pub(crate) struct LeakDetector { next_handle_id: u64, @@ -798,6 +912,11 @@ pub(crate) struct LeakDetector { #[cfg(any(test, feature = "leak-detection"))] impl LeakDetector { + /// Records that a new handle has been created for the given entity. + /// + /// Returns a unique `HandleId` that must be passed to `handle_released` when + /// the handle is dropped. If `LEAK_BACKTRACE` is set, captures a backtrace + /// at the allocation site. #[track_caller] pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId { let id = util::post_inc(&mut self.next_handle_id); @@ -810,23 +929,40 @@ impl LeakDetector { handle_id } + /// Records that a handle has been released (dropped). + /// + /// This removes the handle from tracking. The `handle_id` should be the same + /// one returned by `handle_created` when the handle was allocated. pub fn handle_released(&mut self, entity_id: EntityId, handle_id: HandleId) { let handles = self.entity_handles.entry(entity_id).or_default(); handles.remove(&handle_id); } + /// Asserts that all handles to the given entity have been released. + /// + /// # Panics + /// + /// Panics if any handles to the entity are still alive. The panic message + /// includes backtraces for each leaked handle if `LEAK_BACKTRACE` is set, + /// otherwise it suggests setting the environment variable to get more info. 
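To make the cycle scenario described in the `LeakDetector` docs concrete, here is a hedged sketch (the `Parent`/`Child` types are illustrative, not part of this change): a strong back-reference forms a cycle that can never be released, while a weak back-reference lets `assert_released` pass.

```rust
use gpui::{Entity, WeakEntity};

// Strong cycle: each entity keeps the other's ref-count above zero forever,
// so both handles show up as leaks in the detector.
struct Parent { child: Option<Entity<Child>> }
struct Child { parent: Option<Entity<Parent>> }

// Breaking the cycle: the back-reference is weak, so dropping the last strong
// `Entity<Parent>` releases both entities and `weak.assert_released()` passes.
struct ChildWithWeakParent { parent: Option<WeakEntity<Parent>> }
```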
pub fn assert_released(&mut self, entity_id: EntityId) { + use std::fmt::Write as _; let handles = self.entity_handles.entry(entity_id).or_default(); if !handles.is_empty() { + let mut out = String::new(); for backtrace in handles.values_mut() { if let Some(mut backtrace) = backtrace.take() { backtrace.resolve(); - eprintln!("Leaked handle: {:#?}", backtrace); + writeln!(out, "Leaked handle:\n{:?}", backtrace).unwrap(); } else { - eprintln!("Leaked handle: export LEAK_BACKTRACE to find allocation site"); + writeln!( + out, + "Leaked handle: (export LEAK_BACKTRACE to find allocation site)" + ) + .unwrap(); } } - panic!(); + panic!("{out}"); } } } diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 4a7b73c359ed3dd55b136b22e9487dee1735e42e..9b982f9a1ca3c14b99dfc93e938aafe4e2f75cff 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -5,7 +5,7 @@ use crate::{ ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, - WindowHandle, WindowOptions, + WindowHandle, WindowOptions, app::GpuiMode, }; use anyhow::{anyhow, bail}; use futures::{Stream, StreamExt, channel::oneshot}; @@ -132,8 +132,11 @@ impl TestAppContext { let http_client = http_client::FakeHttpClient::with_404_response(); let text_system = Arc::new(TextSystem::new(platform.text_system())); + let mut app = App::new_app(platform.clone(), asset_source, http_client); + app.borrow_mut().mode = GpuiMode::test(); + Self { - app: App::new_app(platform.clone(), asset_source, http_client), + app, background_executor, foreground_executor, dispatcher, @@ -144,6 +147,11 @@ impl TestAppContext { } } + /// Skip all drawing operations for the duration of this test. + pub fn skip_drawing(&mut self) { + self.app.borrow_mut().mode = GpuiMode::Test { skip_drawing: true }; + } + /// Create a single TestAppContext, for non-multi-client tests pub fn single() -> Self { let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0)); @@ -1037,7 +1045,7 @@ impl VisualContext for VisualTestContext { fn focus(&mut self, view: &Entity) -> Self::Result<()> { self.window .update(&mut self.cx, |_, window, cx| { - view.read(cx).focus_handle(cx).focus(window) + view.read(cx).focus_handle(cx).focus(window, cx) }) .unwrap() } diff --git a/crates/gpui/src/bounds_tree.rs b/crates/gpui/src/bounds_tree.rs index d621609bf7334801059513e03dfd11b4036ea816..9cf86a2cc9b6def8fbf5ca7e94f7cd19236468cc 100644 --- a/crates/gpui/src/bounds_tree.rs +++ b/crates/gpui/src/bounds_tree.rs @@ -5,14 +5,91 @@ use std::{ ops::{Add, Sub}, }; +/// Maximum children per internal node (R-tree style branching factor). +/// Higher values = shorter tree = fewer cache misses, but more work per node. +const MAX_CHILDREN: usize = 12; + +/// A spatial tree optimized for finding maximum ordering among intersecting bounds. +/// +/// This is an R-tree variant specifically designed for the use case of assigning +/// z-order to overlapping UI elements. Key optimizations: +/// - Tracks the leaf with global max ordering for O(1) fast-path queries +/// - Uses higher branching factor (4) for lower tree height +/// - Aggressive pruning during search based on max_order metadata #[derive(Debug)] pub(crate) struct BoundsTree where U: Clone + Debug + Default + PartialEq, { - root: Option, + /// All nodes stored contiguously for cache efficiency. 
nodes: Vec>, - stack: Vec, + /// Index of the root node, if any. + root: Option, + /// Index of the leaf with the highest ordering (for fast-path lookups). + max_leaf: Option, + /// Reusable stack for tree traversal during insertion. + insert_path: Vec, + /// Reusable stack for search operations. + search_stack: Vec, +} + +/// A node in the bounds tree. +#[derive(Debug, Clone)] +struct Node +where + U: Clone + Debug + Default + PartialEq, +{ + /// Bounding box containing this node and all descendants. + bounds: Bounds, + /// Maximum ordering value in this subtree. + max_order: u32, + /// Node-specific data. + kind: NodeKind, +} + +#[derive(Debug, Clone)] +enum NodeKind { + /// Leaf node containing actual bounds data. + Leaf { + /// The ordering assigned to this bounds. + order: u32, + }, + /// Internal node with children. + Internal { + /// Indices of child nodes (2 to MAX_CHILDREN). + children: NodeChildren, + }, +} + +/// Fixed-size array for child indices, avoiding heap allocation. +#[derive(Debug, Clone)] +struct NodeChildren { + // Keeps an invariant where the max order child is always at the end + indices: [usize; MAX_CHILDREN], + len: u8, +} + +impl NodeChildren { + fn new() -> Self { + Self { + indices: [0; MAX_CHILDREN], + len: 0, + } + } + + fn push(&mut self, index: usize) { + debug_assert!((self.len as usize) < MAX_CHILDREN); + self.indices[self.len as usize] = index; + self.len += 1; + } + + fn len(&self) -> usize { + self.len as usize + } + + fn as_slice(&self) -> &[usize] { + &self.indices[..self.len as usize] + } } impl BoundsTree @@ -26,158 +103,250 @@ where + Half + Default, { + /// Clears all nodes from the tree. pub fn clear(&mut self) { - self.root = None; self.nodes.clear(); - self.stack.clear(); + self.root = None; + self.max_leaf = None; + self.insert_path.clear(); + self.search_stack.clear(); } + /// Inserts bounds into the tree and returns its assigned ordering. + /// + /// The ordering is one greater than the maximum ordering of any + /// existing bounds that intersect with the new bounds. pub fn insert(&mut self, new_bounds: Bounds) -> u32 { - // If the tree is empty, make the root the new leaf. - let Some(mut index) = self.root else { - let new_node = self.push_leaf(new_bounds, 1); - self.root = Some(new_node); - return 1; + // Find maximum ordering among intersecting bounds + let max_intersecting = self.find_max_ordering(&new_bounds); + let ordering = max_intersecting + 1; + + // Insert the new leaf + let new_leaf_idx = self.insert_leaf(new_bounds, ordering); + + // Update max_leaf tracking + self.max_leaf = match self.max_leaf { + None => Some(new_leaf_idx), + Some(old_idx) if self.nodes[old_idx].max_order < ordering => Some(new_leaf_idx), + some => some, }; - // Search for the best place to add the new leaf based on heuristics. - let mut max_intersecting_ordering = 0; - while let Node::Internal { - left, - right, - bounds: node_bounds, - .. - } = &mut self.nodes[index] - { - let left = *left; - let right = *right; - *node_bounds = node_bounds.union(&new_bounds); - self.stack.push(index); - - // Descend to the best-fit child, based on which one would increase - // the surface area the least. This attempts to keep the tree balanced - // in terms of surface area. If there is an intersection with the other child, - // add its keys to the intersections vector. 
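A hedged sketch of the ordering contract documented on `insert` above. `BoundsTree` is crate-private, so this is the kind of assertion a unit test inside `bounds_tree.rs` could make (assuming `use super::BoundsTree;` plus gpui's `point`/`px`/`size` helpers):

```rust
use crate::{Bounds, Pixels, point, px, size};

// Assumes `use super::BoundsTree;` when placed in this module's tests.
fn ordering_demo(tree: &mut BoundsTree<Pixels>) {
    // First rectangle intersects nothing, so it gets order 1.
    let a = tree.insert(Bounds::new(point(px(0.), px(0.)), size(px(10.), px(10.))));
    // Disjoint from `a`, so it also gets order 1.
    let b = tree.insert(Bounds::new(point(px(20.), px(0.)), size(px(10.), px(10.))));
    // Overlaps both earlier rectangles (max intersecting order is 1), so it gets 2.
    let c = tree.insert(Bounds::new(point(px(5.), px(5.)), size(px(30.), px(10.))));
    assert_eq!((a, b, c), (1, 1, 2));
}
```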
- let left_cost = new_bounds.union(self.nodes[left].bounds()).half_perimeter(); - let right_cost = new_bounds - .union(self.nodes[right].bounds()) - .half_perimeter(); - if left_cost < right_cost { - max_intersecting_ordering = - self.find_max_ordering(right, &new_bounds, max_intersecting_ordering); - index = left; - } else { - max_intersecting_ordering = - self.find_max_ordering(left, &new_bounds, max_intersecting_ordering); - index = right; + ordering + } + + /// Finds the maximum ordering among all bounds that intersect with the query. + fn find_max_ordering(&mut self, query: &Bounds) -> u32 { + let Some(root_idx) = self.root else { + return 0; + }; + + // Fast path: check if the max-ordering leaf intersects + if let Some(max_idx) = self.max_leaf { + let max_node = &self.nodes[max_idx]; + if query.intersects(&max_node.bounds) { + return max_node.max_order; } } - // We've found a leaf ('index' now refers to a leaf node). - // We'll insert a new parent node above the leaf and attach our new leaf to it. - let sibling = index; - - // Check for collision with the located leaf node - let Node::Leaf { - bounds: sibling_bounds, - order: sibling_ordering, - .. - } = &self.nodes[index] - else { - unreachable!(); - }; - if sibling_bounds.intersects(&new_bounds) { - max_intersecting_ordering = cmp::max(max_intersecting_ordering, *sibling_ordering); + // Slow path: search the tree + self.search_stack.clear(); + self.search_stack.push(root_idx); + + let mut max_found = 0u32; + + while let Some(node_idx) = self.search_stack.pop() { + let node = &self.nodes[node_idx]; + + // Pruning: skip if this subtree can't improve our result + if node.max_order <= max_found { + continue; + } + + // Spatial pruning: skip if bounds don't intersect + if !query.intersects(&node.bounds) { + continue; + } + + match &node.kind { + NodeKind::Leaf { order } => { + max_found = cmp::max(max_found, *order); + } + NodeKind::Internal { children } => { + // Children are maintained with highest max_order at the end. + // Push in forward order to highest (last) is popped first. + for &child_idx in children.as_slice() { + if self.nodes[child_idx].max_order > max_found { + self.search_stack.push(child_idx); + } + } + } + } } - let ordering = max_intersecting_ordering + 1; - let new_node = self.push_leaf(new_bounds, ordering); - let new_parent = self.push_internal(sibling, new_node); + max_found + } - // If there was an old parent, we need to update its children indices. - if let Some(old_parent) = self.stack.last().copied() { - let Node::Internal { left, right, .. } = &mut self.nodes[old_parent] else { - unreachable!(); - }; + /// Inserts a leaf node with the given bounds and ordering. + /// Returns the index of the new leaf. + fn insert_leaf(&mut self, bounds: Bounds, order: u32) -> usize { + let new_leaf_idx = self.nodes.len(); + self.nodes.push(Node { + bounds: bounds.clone(), + max_order: order, + kind: NodeKind::Leaf { order }, + }); - if *left == sibling { - *left = new_parent; + let Some(root_idx) = self.root else { + // Tree is empty, new leaf becomes root + self.root = Some(new_leaf_idx); + return new_leaf_idx; + }; + + // If root is a leaf, create internal node with both + if matches!(self.nodes[root_idx].kind, NodeKind::Leaf { .. 
}) { + let root_bounds = self.nodes[root_idx].bounds.clone(); + let root_order = self.nodes[root_idx].max_order; + + let mut children = NodeChildren::new(); + // Max end invariant + if order > root_order { + children.push(root_idx); + children.push(new_leaf_idx); } else { - *right = new_parent; + children.push(new_leaf_idx); + children.push(root_idx); } - } else { - // If the old parent was the root, the new parent is the new root. - self.root = Some(new_parent); + + let new_root_idx = self.nodes.len(); + self.nodes.push(Node { + bounds: root_bounds.union(&bounds), + max_order: cmp::max(root_order, order), + kind: NodeKind::Internal { children }, + }); + self.root = Some(new_root_idx); + return new_leaf_idx; } - for node_index in self.stack.drain(..).rev() { - let Node::Internal { - max_order: max_ordering, - .. - } = &mut self.nodes[node_index] - else { - unreachable!() + // Descend to find the best internal node to insert into + self.insert_path.clear(); + let mut current_idx = root_idx; + + loop { + let current = &self.nodes[current_idx]; + let NodeKind::Internal { children } = ¤t.kind else { + unreachable!("Should only traverse internal nodes"); }; - if *max_ordering >= ordering { - break; - } - *max_ordering = ordering; - } - ordering - } + self.insert_path.push(current_idx); + + // Find the best child to descend into + let mut best_child_idx = children.as_slice()[0]; + let mut best_child_pos = 0; + let mut best_cost = bounds + .union(&self.nodes[best_child_idx].bounds) + .half_perimeter(); - fn find_max_ordering(&self, index: usize, bounds: &Bounds, mut max_ordering: u32) -> u32 { - match &self.nodes[index] { - Node::Leaf { - bounds: node_bounds, - order: ordering, - .. - } => { - if bounds.intersects(node_bounds) { - max_ordering = cmp::max(*ordering, max_ordering); + for (pos, &child_idx) in children.as_slice().iter().enumerate().skip(1) { + let cost = bounds.union(&self.nodes[child_idx].bounds).half_perimeter(); + if cost < best_cost { + best_cost = cost; + best_child_idx = child_idx; + best_child_pos = pos; } } - Node::Internal { - left, - right, - bounds: node_bounds, - max_order: node_max_ordering, - .. - } => { - if bounds.intersects(node_bounds) && max_ordering < *node_max_ordering { - let left_max_ordering = self.nodes[*left].max_ordering(); - let right_max_ordering = self.nodes[*right].max_ordering(); - if left_max_ordering > right_max_ordering { - max_ordering = self.find_max_ordering(*left, bounds, max_ordering); - max_ordering = self.find_max_ordering(*right, bounds, max_ordering); + + // Check if best child is a leaf or internal + if matches!(self.nodes[best_child_idx].kind, NodeKind::Leaf { .. }) { + // Best child is a leaf. Check if current node has room for another child. 
+ if children.len() < MAX_CHILDREN { + // Add new leaf directly to this node + let node = &mut self.nodes[current_idx]; + + if let NodeKind::Internal { children } = &mut node.kind { + children.push(new_leaf_idx); + // Swap new leaf only if it has the highest max_order + if order <= node.max_order { + let last = children.len() - 1; + children.indices.swap(last - 1, last); + } + } + + node.bounds = node.bounds.union(&bounds); + node.max_order = cmp::max(node.max_order, order); + break; + } else { + // Node is full, create new internal with [best_leaf, new_leaf] + let sibling_bounds = self.nodes[best_child_idx].bounds.clone(); + let sibling_order = self.nodes[best_child_idx].max_order; + + let mut new_children = NodeChildren::new(); + // Max end invariant + if order > sibling_order { + new_children.push(best_child_idx); + new_children.push(new_leaf_idx); } else { - max_ordering = self.find_max_ordering(*right, bounds, max_ordering); - max_ordering = self.find_max_ordering(*left, bounds, max_ordering); + new_children.push(new_leaf_idx); + new_children.push(best_child_idx); + } + + let new_internal_idx = self.nodes.len(); + let new_internal_max = cmp::max(sibling_order, order); + self.nodes.push(Node { + bounds: sibling_bounds.union(&bounds), + max_order: new_internal_max, + kind: NodeKind::Internal { + children: new_children, + }, + }); + + // Replace the leaf with the new internal in parent + let parent = &mut self.nodes[current_idx]; + if let NodeKind::Internal { children } = &mut parent.kind { + let children_len = children.len(); + + children.indices[best_child_pos] = new_internal_idx; + + // If new internal has highest max_order, swap it to the end + // to maintain sorting invariant + if new_internal_max > parent.max_order { + children.indices.swap(best_child_pos, children_len - 1); + } } + break; } + } else { + // Best child is internal, continue descent + current_idx = best_child_idx; } } - max_ordering - } - fn push_leaf(&mut self, bounds: Bounds, order: u32) -> usize { - self.nodes.push(Node::Leaf { bounds, order }); - self.nodes.len() - 1 - } + // Propagate bounds and max_order updates up the tree + let mut updated_child_idx = None; + for &node_idx in self.insert_path.iter().rev() { + let node = &mut self.nodes[node_idx]; + node.bounds = node.bounds.union(&bounds); - fn push_internal(&mut self, left: usize, right: usize) -> usize { - let left_node = &self.nodes[left]; - let right_node = &self.nodes[right]; - let new_bounds = left_node.bounds().union(right_node.bounds()); - let max_ordering = cmp::max(left_node.max_ordering(), right_node.max_ordering()); - self.nodes.push(Node::Internal { - bounds: new_bounds, - left, - right, - max_order: max_ordering, - }); - self.nodes.len() - 1 + if node.max_order < order { + node.max_order = order; + + // Swap updated child to end (skip first iteration since the invariant is already handled by previous cases) + if let Some(child_idx) = updated_child_idx { + if let NodeKind::Internal { children } = &mut node.kind { + if let Some(pos) = children.as_slice().iter().position(|&c| c == child_idx) + { + let last = children.len() - 1; + if pos != last { + children.indices.swap(pos, last); + } + } + } + } + } + + updated_child_idx = Some(node_idx); + } + + new_leaf_idx } } @@ -187,50 +356,11 @@ where { fn default() -> Self { BoundsTree { - root: None, nodes: Vec::new(), - stack: Vec::new(), - } - } -} - -#[derive(Debug, Clone)] -enum Node -where - U: Clone + Debug + Default + PartialEq, -{ - Leaf { - bounds: Bounds, - order: u32, - }, - Internal { - left: 
usize, - right: usize, - bounds: Bounds, - max_order: u32, - }, -} - -impl Node -where - U: Clone + Debug + Default + PartialEq, -{ - fn bounds(&self) -> &Bounds { - match self { - Node::Leaf { bounds, .. } => bounds, - Node::Internal { bounds, .. } => bounds, - } - } - - fn max_ordering(&self) -> u32 { - match self { - Node::Leaf { - order: ordering, .. - } => *ordering, - Node::Internal { - max_order: max_ordering, - .. - } => *max_ordering, + root: None, + max_leaf: None, + insert_path: Vec::new(), + search_stack: Vec::new(), } } } diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 821f155f96d168e5319d9a8981ca4be75df7b854..cf55edefaf70c080e171a8e21b350fd3c6d82f75 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -20,8 +20,8 @@ use crate::{ DispatchPhase, Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, Hitbox, HitboxBehavior, HitboxId, InspectorElementId, IntoElement, IsZero, KeyContext, KeyDownEvent, KeyUpEvent, KeyboardButton, KeyboardClickEvent, LayoutId, ModifiersChangedEvent, - MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Overflow, - ParentElement, Pixels, Point, Render, ScrollWheelEvent, SharedString, Size, Style, + MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, + Overflow, ParentElement, Pixels, Point, Render, ScrollWheelEvent, SharedString, Size, Style, StyleRefinement, Styled, Task, TooltipId, Visibility, Window, WindowControlArea, point, px, size, }; @@ -166,6 +166,38 @@ impl Interactivity { })); } + /// Bind the given callback to the mouse pressure event, during the bubble phase + /// the imperative API equivalent to [`InteractiveElement::on_mouse_pressure`]. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + pub fn on_mouse_pressure( + &mut self, + listener: impl Fn(&MousePressureEvent, &mut Window, &mut App) + 'static, + ) { + self.mouse_pressure_listeners + .push(Box::new(move |event, phase, hitbox, window, cx| { + if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) { + (listener)(event, window, cx) + } + })); + } + + /// Bind the given callback to the mouse pressure event, during the capture phase + /// the imperative API equivalent to [`InteractiveElement::on_mouse_pressure`]. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + pub fn capture_mouse_pressure( + &mut self, + listener: impl Fn(&MousePressureEvent, &mut Window, &mut App) + 'static, + ) { + self.mouse_pressure_listeners + .push(Box::new(move |event, phase, hitbox, window, cx| { + if phase == DispatchPhase::Capture && hitbox.is_hovered(window) { + (listener)(event, window, cx) + } + })); + } + /// Bind the given callback to the mouse up event for the given button, during the bubble phase. /// The imperative API equivalent to [`InteractiveElement::on_mouse_up`]. /// @@ -622,7 +654,7 @@ pub trait InteractiveElement: Sized { /// Set whether this element is a tab stop. /// /// When false, the element remains in tab-index order but cannot be reached via keyboard navigation. - /// Useful for container elements: focus the container, then call `window.focus_next()` to focus + /// Useful for container elements: focus the container, then call `window.focus_next(cx)` to focus /// the first tab stop inside it while having the container element itself be unreachable via the keyboard. 
/// Should only be used with `tab_index`. fn tab_stop(mut self, tab_stop: bool) -> Self { @@ -769,6 +801,30 @@ pub trait InteractiveElement: Sized { self } + /// Bind the given callback to the mouse pressure event, during the bubble phase + /// the fluent API equivalent to [`Interactivity::on_mouse_pressure`] + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + fn on_mouse_pressure( + mut self, + listener: impl Fn(&MousePressureEvent, &mut Window, &mut App) + 'static, + ) -> Self { + self.interactivity().on_mouse_pressure(listener); + self + } + + /// Bind the given callback to the mouse pressure event, during the capture phase + /// the fluent API equivalent to [`Interactivity::on_mouse_pressure`] + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + fn capture_mouse_pressure( + mut self, + listener: impl Fn(&MousePressureEvent, &mut Window, &mut App) + 'static, + ) -> Self { + self.interactivity().capture_mouse_pressure(listener); + self + } + /// Bind the given callback to the mouse down event, on any button, during the capture phase, /// when the mouse is outside of the bounds of this element. /// The fluent API equivalent to [`Interactivity::on_mouse_down_out`]. @@ -1197,7 +1253,8 @@ pub(crate) type MouseDownListener = Box; pub(crate) type MouseUpListener = Box; - +pub(crate) type MousePressureListener = + Box; pub(crate) type MouseMoveListener = Box; @@ -1521,6 +1578,7 @@ pub struct Interactivity { pub(crate) group_drag_over_styles: Vec<(TypeId, GroupStyle)>, pub(crate) mouse_down_listeners: Vec, pub(crate) mouse_up_listeners: Vec, + pub(crate) mouse_pressure_listeners: Vec, pub(crate) mouse_move_listeners: Vec, pub(crate) scroll_wheel_listeners: Vec, pub(crate) key_down_listeners: Vec, @@ -1714,6 +1772,7 @@ impl Interactivity { || self.group_hover_style.is_some() || self.hover_listener.is_some() || !self.mouse_up_listeners.is_empty() + || !self.mouse_pressure_listeners.is_empty() || !self.mouse_down_listeners.is_empty() || !self.mouse_move_listeners.is_empty() || !self.click_listeners.is_empty() @@ -2037,12 +2096,12 @@ impl Interactivity { // This behavior can be suppressed by using `cx.prevent_default()`. if let Some(focus_handle) = self.tracked_focus_handle.clone() { let hitbox = hitbox.clone(); - window.on_mouse_event(move |_: &MouseDownEvent, phase, window, _| { + window.on_mouse_event(move |_: &MouseDownEvent, phase, window, cx| { if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) && !window.default_prevented() { - window.focus(&focus_handle); + window.focus(&focus_handle, cx); // If there is a parent that is also focusable, prevent it // from transferring focus because we already did so. window.prevent_default(); @@ -2064,6 +2123,13 @@ impl Interactivity { }) } + for listener in self.mouse_pressure_listeners.drain(..) { + let hitbox = hitbox.clone(); + window.on_mouse_event(move |event: &MousePressureEvent, phase, window, cx| { + listener(event, phase, &hitbox, window, cx); + }) + } + for listener in self.mouse_move_listeners.drain(..) 
{ let hitbox = hitbox.clone(); window.on_mouse_event(move |event: &MouseMoveEvent, phase, window, cx| { @@ -3193,7 +3259,11 @@ impl ScrollHandle { match active_item.strategy { ScrollStrategy::FirstVisible => { if state.overflow.y == Overflow::Scroll { - if bounds.top() + scroll_offset.y < state.bounds.top() { + let child_height = bounds.size.height; + let viewport_height = state.bounds.size.height; + if child_height > viewport_height { + scroll_offset.y = state.bounds.top() - bounds.top(); + } else if bounds.top() + scroll_offset.y < state.bounds.top() { scroll_offset.y = state.bounds.top() - bounds.top(); } else if bounds.bottom() + scroll_offset.y > state.bounds.bottom() { scroll_offset.y = state.bounds.bottom() - bounds.bottom(); @@ -3206,7 +3276,11 @@ impl ScrollHandle { } if state.overflow.x == Overflow::Scroll { - if bounds.left() + scroll_offset.x < state.bounds.left() { + let child_width = bounds.size.width; + let viewport_width = state.bounds.size.width; + if child_width > viewport_width { + scroll_offset.x = state.bounds.left() - bounds.left(); + } else if bounds.left() + scroll_offset.x < state.bounds.left() { scroll_offset.x = state.bounds.left() - bounds.left(); } else if bounds.right() + scroll_offset.x > state.bounds.right() { scroll_offset.x = state.bounds.right() - bounds.right(); @@ -3268,3 +3342,46 @@ impl ScrollHandle { self.0.borrow().child_bounds.len() } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn scroll_handle_aligns_wide_children_to_left_edge() { + let handle = ScrollHandle::new(); + { + let mut state = handle.0.borrow_mut(); + state.bounds = Bounds::new(point(px(0.), px(0.)), size(px(80.), px(20.))); + state.child_bounds = vec![Bounds::new(point(px(25.), px(0.)), size(px(200.), px(20.)))]; + state.overflow.x = Overflow::Scroll; + state.active_item = Some(ScrollActiveItem { + index: 0, + strategy: ScrollStrategy::default(), + }); + } + + handle.scroll_to_active_item(); + + assert_eq!(handle.offset().x, px(-25.)); + } + + #[test] + fn scroll_handle_aligns_tall_children_to_top_edge() { + let handle = ScrollHandle::new(); + { + let mut state = handle.0.borrow_mut(); + state.bounds = Bounds::new(point(px(0.), px(0.)), size(px(20.), px(80.))); + state.child_bounds = vec![Bounds::new(point(px(0.), px(25.)), size(px(20.), px(200.)))]; + state.overflow.y = Overflow::Scroll; + state.active_item = Some(ScrollActiveItem { + index: 0, + strategy: ScrollStrategy::default(), + }); + } + + handle.scroll_to_active_item(); + + assert_eq!(handle.offset().y, px(-25.)); + } +} diff --git a/crates/gpui/src/elements/surface.rs b/crates/gpui/src/elements/surface.rs index b4fced1001b3f9881b66f2f93e81588c750aa64c..ac1c247b47ec81bca06e458827786f549ca2d747 100644 --- a/crates/gpui/src/elements/surface.rs +++ b/crates/gpui/src/elements/surface.rs @@ -29,6 +29,7 @@ pub struct Surface { } /// Create a new surface element. 
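A hedged usage sketch for the `on_mouse_pressure` / `PressureStage` API introduced in the `div.rs` and `interactive.rs` hunks of this diff (per the event docs, this currently only fires for macOS force-touch trackpads):

```rust
use gpui::{MousePressureEvent, PressureStage, div, prelude::*};

fn force_clickable() -> impl IntoElement {
    div()
        .id("force-clickable")
        .on_mouse_pressure(|event: &MousePressureEvent, _window, _cx| {
            // The stage reaches `Force` when the trackpad is pressed hard enough for a force click.
            if event.stage == PressureStage::Force {
                println!("force click at {:?} (pressure {})", event.position, event.pressure);
            }
        })
        .child("Force-click me")
}
```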
+#[cfg(target_os = "macos")] pub fn surface(source: impl Into) -> Surface { Surface { source: source.into(), diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 914e8a286510a2ffd833db4c4d3ef85c84db073f..1b1bfd778c7bc746c67551eb31cf70f60b1485ea 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -6,6 +6,7 @@ use crate::{ register_tooltip_mouse_handlers, set_tooltip_on_window, }; use anyhow::Context as _; +use itertools::Itertools; use smallvec::SmallVec; use std::{ borrow::Cow, @@ -597,14 +598,14 @@ impl TextLayout { .unwrap() .lines .iter() - .map(|s| s.text.to_string()) - .collect::>() + .map(|s| &s.text) .join("\n") } /// The text for this layout (with soft-wraps as newlines) pub fn wrapped_text(&self) -> String { - let mut lines = Vec::new(); + let mut accumulator = String::new(); + for wrapped in self.0.borrow().as_ref().unwrap().lines.iter() { let mut seen = 0; for boundary in wrapped.layout.wrap_boundaries.iter() { @@ -612,13 +613,16 @@ impl TextLayout { [boundary.glyph_ix] .index; - lines.push(wrapped.text[seen..index].to_string()); + accumulator.push_str(&wrapped.text[seen..index]); + accumulator.push('\n'); seen = index; } - lines.push(wrapped.text[seen..].to_string()); + accumulator.push_str(&wrapped.text[seen..]); + accumulator.push('\n'); } - - lines.join("\n") + // Remove trailing newline + accumulator.pop(); + accumulator } } diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 93082563c02f4168b1d73e2929a6bf9dbd153237..a7486f0c00ac4e11ef807af90f6fb75b74b5d142 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -11,7 +11,7 @@ use crate::{ StyleRefinement, Styled, Window, point, size, }; use smallvec::SmallVec; -use std::{cell::RefCell, cmp, ops::Range, rc::Rc}; +use std::{cell::RefCell, cmp, ops::Range, rc::Rc, usize}; use super::ListHorizontalSizingBehavior; @@ -92,6 +92,10 @@ pub enum ScrollStrategy { /// May not be possible if there's not enough list items above the item scrolled to: /// in this case, the element will be placed at the closest possible position. Bottom, + /// If the element is not visible attempt to place it at: + /// - The top of the list's viewport if the target element is above currently visible elements. + /// - The bottom of the list's viewport if the target element is above currently visible elements. + Nearest, } #[derive(Clone, Copy, Debug)] @@ -231,6 +235,11 @@ impl UniformListScrollHandle { false } } + + /// Scroll to the bottom of the list. 
+ pub fn scroll_to_bottom(&self) { + self.scroll_to_item(usize::MAX, ScrollStrategy::Bottom); + } } impl Styled for UniformList { @@ -391,39 +400,42 @@ impl Element for UniformList { scroll_offset.x = Pixels::ZERO; } - if let Some(deferred_scroll) = shared_scroll_to_item { - let mut ix = deferred_scroll.item_index; + if let Some(DeferredScrollToItem { + mut item_index, + mut strategy, + offset, + scroll_strict, + }) = shared_scroll_to_item + { if y_flipped { - ix = self.item_count.saturating_sub(ix + 1); + item_index = self.item_count.saturating_sub(item_index + 1); } let list_height = padded_bounds.size.height; let mut updated_scroll_offset = shared_scroll_offset.borrow_mut(); - let item_top = item_height * ix; + let item_top = item_height * item_index; let item_bottom = item_top + item_height; let scroll_top = -updated_scroll_offset.y; - let offset_pixels = item_height * deferred_scroll.offset; - let mut scrolled_to_top = false; - - if item_top < scroll_top + offset_pixels { - scrolled_to_top = true; - // todo: using the padding here is wrong - this only works well for few scenarios - updated_scroll_offset.y = -item_top + padding.top + offset_pixels; - } else if item_bottom > scroll_top + list_height { - scrolled_to_top = true; - updated_scroll_offset.y = -(item_bottom - list_height); - } + let offset_pixels = item_height * offset; + + // is the selected item above/below currently visible items + let is_above = item_top < scroll_top + offset_pixels; + let is_below = item_bottom > scroll_top + list_height; + + if scroll_strict || is_above || is_below { + if strategy == ScrollStrategy::Nearest { + if is_above { + strategy = ScrollStrategy::Top; + } else if is_below { + strategy = ScrollStrategy::Bottom; + } + } - if deferred_scroll.scroll_strict - || (scrolled_to_top - && (item_top < scroll_top + offset_pixels - || item_bottom > scroll_top + list_height)) - { - match deferred_scroll.strategy { + let max_scroll_offset = + (content_height - list_height).max(Pixels::ZERO); + match strategy { ScrollStrategy::Top => { updated_scroll_offset.y = -(item_top - offset_pixels) - .max(Pixels::ZERO) - .min(content_height - list_height) - .max(Pixels::ZERO); + .clamp(Pixels::ZERO, max_scroll_offset); } ScrollStrategy::Center => { let item_center = item_top + item_height / 2.0; @@ -431,18 +443,15 @@ impl Element for UniformList { let viewport_height = list_height - offset_pixels; let viewport_center = offset_pixels + viewport_height / 2.0; let target_scroll_top = item_center - viewport_center; - - updated_scroll_offset.y = -target_scroll_top - .max(Pixels::ZERO) - .min(content_height - list_height) - .max(Pixels::ZERO); + updated_scroll_offset.y = + -target_scroll_top.clamp(Pixels::ZERO, max_scroll_offset); } ScrollStrategy::Bottom => { - updated_scroll_offset.y = -(item_bottom - list_height - + offset_pixels) - .max(Pixels::ZERO) - .min(content_height - list_height) - .max(Pixels::ZERO); + updated_scroll_offset.y = -(item_bottom - list_height) + .clamp(Pixels::ZERO, max_scroll_offset); + } + ScrollStrategy::Nearest => { + // Nearest, but the item is visible -> no scroll is required } } } @@ -659,9 +668,9 @@ impl UniformList { } /// Track and render scroll state of this list with reference to the given scroll handle. 
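A hedged sketch tying the scrolling additions above together, mirroring the `ScrollStrategy::Nearest` test later in this file (`selected` is an assumed selection index; the handle is assumed to be attached in `render` via `.track_scroll(&handle)`, which now takes a reference):

```rust
use gpui::{ScrollStrategy, UniformListScrollHandle};

fn reveal_selection(handle: &UniformListScrollHandle, selected: usize) {
    // Nearest only scrolls when the item is outside the viewport: items above
    // snap to the top edge, items below snap to the bottom edge.
    handle.scroll_to_item(selected, ScrollStrategy::Nearest);
}

fn jump_to_end(handle: &UniformListScrollHandle) {
    // Convenience added in this diff; internally uses ScrollStrategy::Bottom.
    handle.scroll_to_bottom();
}
```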
- pub fn track_scroll(mut self, handle: UniformListScrollHandle) -> Self { + pub fn track_scroll(mut self, handle: &UniformListScrollHandle) -> Self { self.interactivity.tracked_scroll_handle = Some(handle.0.borrow().base_handle.clone()); - self.scroll_handle = Some(handle); + self.scroll_handle = Some(handle.clone()); self } @@ -695,3 +704,150 @@ impl InteractiveElement for UniformList { &mut self.interactivity } } + +#[cfg(test)] +mod test { + use crate::TestAppContext; + + #[gpui::test] + fn test_scroll_strategy_nearest(cx: &mut TestAppContext) { + use crate::{ + Context, FocusHandle, ScrollStrategy, UniformListScrollHandle, Window, div, prelude::*, + px, uniform_list, + }; + use std::ops::Range; + + actions!(example, [SelectNext, SelectPrev]); + + struct TestView { + index: usize, + length: usize, + scroll_handle: UniformListScrollHandle, + focus_handle: FocusHandle, + visible_range: Range, + } + + impl TestView { + pub fn select_next( + &mut self, + _: &SelectNext, + window: &mut Window, + _: &mut Context, + ) { + if self.index + 1 == self.length { + self.index = 0 + } else { + self.index += 1; + } + self.scroll_handle + .scroll_to_item(self.index, ScrollStrategy::Nearest); + window.refresh(); + } + + pub fn select_previous( + &mut self, + _: &SelectPrev, + window: &mut Window, + _: &mut Context, + ) { + if self.index == 0 { + self.index = self.length - 1 + } else { + self.index -= 1; + } + self.scroll_handle + .scroll_to_item(self.index, ScrollStrategy::Nearest); + window.refresh(); + } + } + + impl Render for TestView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .id("list-example") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .size_full() + .child( + uniform_list( + "entries", + self.length, + cx.processor(|this, range: Range, _window, _cx| { + this.visible_range = range.clone(); + range + .map(|ix| div().id(ix).h(px(20.0)).child(format!("Item {ix}"))) + .collect() + }), + ) + .track_scroll(&self.scroll_handle) + .h(px(200.0)), + ) + } + } + + let (view, cx) = cx.add_window_view(|window, cx| { + let focus_handle = cx.focus_handle(); + window.focus(&focus_handle, cx); + TestView { + scroll_handle: UniformListScrollHandle::new(), + index: 0, + focus_handle, + length: 47, + visible_range: 0..0, + } + }); + + // 10 out of 47 items are visible + + // First 9 times selecting next item does not scroll + for ix in 1..10 { + cx.dispatch_action(SelectNext); + view.read_with(cx, |view, _| { + assert_eq!(view.index, ix); + assert_eq!(view.visible_range, 0..10); + }) + } + + // Now each time the list scrolls down by 1 + for ix in 10..47 { + cx.dispatch_action(SelectNext); + view.read_with(cx, |view, _| { + assert_eq!(view.index, ix); + assert_eq!(view.visible_range, ix - 9..ix + 1); + }) + } + + // After the last item we move back to the start + cx.dispatch_action(SelectNext); + view.read_with(cx, |view, _| { + assert_eq!(view.index, 0); + assert_eq!(view.visible_range, 0..10); + }); + + // Return to the last element + cx.dispatch_action(SelectPrev); + view.read_with(cx, |view, _| { + assert_eq!(view.index, 46); + assert_eq!(view.visible_range, 37..47); + }); + + // First 9 times selecting previous does not scroll + for ix in (37..46).rev() { + cx.dispatch_action(SelectPrev); + view.read_with(cx, |view, _| { + assert_eq!(view.index, ix); + assert_eq!(view.visible_range, 37..47); + }) + } + + // Now each time the list scrolls up by 1 + for ix in (0..37).rev() { 
+ cx.dispatch_action(SelectPrev); + view.read_with(cx, |view, _| { + assert_eq!(view.index, ix); + assert_eq!(view.visible_range, ix..ix + 10); + }) + } + } +} diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c05cf5745d6e19172191e298fa4f31e76513a00b..6c2ecb341ff2fe446efd7823c107fd32a557feb5 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -1,6 +1,7 @@ -use crate::{App, PlatformDispatcher}; +use crate::{App, PlatformDispatcher, RunnableMeta, RunnableVariant, TaskTiming, profiler}; use async_task::Runnable; use futures::channel::mpsc; +use parking_lot::{Condvar, Mutex}; use smol::prelude::*; use std::{ fmt::Debug, @@ -46,6 +47,52 @@ pub struct ForegroundExecutor { not_send: PhantomData>, } +/// Realtime task priority +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +#[repr(u8)] +pub enum RealtimePriority { + /// Audio task + Audio, + /// Other realtime task + #[default] + Other, +} + +/// Task priority +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +#[repr(u8)] +pub enum Priority { + /// Realtime priority + /// + /// Spawning a task with this priority will spin it off on a separate thread dedicated just to that task. + Realtime(RealtimePriority), + /// High priority + /// + /// Only use for tasks that are critical to the user experience / responsiveness of the editor. + High, + /// Medium priority, probably suits most of your use cases. + #[default] + Medium, + /// Low priority + /// + /// Prioritize this for background work that can come in large quantities + /// to not starve the executor of resources for high priority tasks + Low, +} + +impl Priority { + #[allow(dead_code)] + pub(crate) const fn probability(&self) -> u32 { + match self { + // realtime priorities are not considered for probability scheduling + Priority::Realtime(_) => 0, + Priority::High => 60, + Priority::Medium => 30, + Priority::Low => 10, + } + } +} + /// Task is a primitive that allows work to happen in the background. /// /// It implements [`Future`] so you can `.await` on it. @@ -62,7 +109,7 @@ enum TaskState { Ready(Option), /// A task that is currently running. - Spawned(async_task::Task), + Spawned(async_task::Task), } impl Task { @@ -146,15 +193,87 @@ impl BackgroundExecutor { } /// Enqueues the given future to be run to completion on a background thread. + #[track_caller] pub fn spawn(&self, future: impl Future + Send + 'static) -> Task where R: Send + 'static, { - self.spawn_internal::(Box::pin(future), None) + self.spawn_with_priority(Priority::default(), future) + } + + /// Enqueues the given future to be run to completion on a background thread. + #[track_caller] + pub fn spawn_with_priority( + &self, + priority: Priority, + future: impl Future + Send + 'static, + ) -> Task + where + R: Send + 'static, + { + self.spawn_internal::(Box::pin(future), None, priority) + } + + /// Enqueues the given future to be run to completion on a background thread and blocking the current task on it. + /// + /// This allows to spawn background work that borrows from its scope. Note that the supplied future will run to + /// completion before the current task is resumed, even if the current task is slated for cancellation. + pub async fn await_on_background(&self, future: impl Future + Send) -> R + where + R: Send, + { + // We need to ensure that cancellation of the parent task does not drop the environment + // before the our own task has completed or got cancelled. 
+ struct NotifyOnDrop<'a>(&'a (Condvar, Mutex)); + + impl Drop for NotifyOnDrop<'_> { + fn drop(&mut self) { + *self.0.1.lock() = true; + self.0.0.notify_all(); + } + } + + struct WaitOnDrop<'a>(&'a (Condvar, Mutex)); + + impl Drop for WaitOnDrop<'_> { + fn drop(&mut self) { + let mut done = self.0.1.lock(); + if !*done { + self.0.0.wait(&mut done); + } + } + } + + let dispatcher = self.dispatcher.clone(); + let location = core::panic::Location::caller(); + + let pair = &(Condvar::new(), Mutex::new(false)); + let _wait_guard = WaitOnDrop(pair); + + let (runnable, task) = unsafe { + async_task::Builder::new() + .metadata(RunnableMeta { location }) + .spawn_unchecked( + move |_| async { + let _notify_guard = NotifyOnDrop(pair); + future.await + }, + move |runnable| { + dispatcher.dispatch( + RunnableVariant::Meta(runnable), + None, + Priority::default(), + ) + }, + ) + }; + runnable.schedule(); + task.await } /// Enqueues the given future to be run to completion on a background thread. /// The given label can be used to control the priority of the task in tests. + #[track_caller] pub fn spawn_labeled( &self, label: TaskLabel, @@ -163,17 +282,73 @@ impl BackgroundExecutor { where R: Send + 'static, { - self.spawn_internal::(Box::pin(future), Some(label)) + self.spawn_internal::(Box::pin(future), Some(label), Priority::default()) } + #[track_caller] fn spawn_internal( &self, future: AnyFuture, label: Option, + #[cfg_attr( + target_os = "windows", + expect( + unused_variables, + reason = "Multi priority scheduler is broken on windows" + ) + )] + priority: Priority, ) -> Task { let dispatcher = self.dispatcher.clone(); - let (runnable, task) = - async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable, label)); + #[cfg(target_os = "windows")] + let priority = Priority::Medium; // multi-prio scheduler is broken on windows + + let (runnable, task) = if let Priority::Realtime(realtime) = priority { + let location = core::panic::Location::caller(); + let (mut tx, rx) = flume::bounded::>(1); + + dispatcher.spawn_realtime( + realtime, + Box::new(move || { + while let Ok(runnable) = rx.recv() { + let start = Instant::now(); + let location = runnable.metadata().location; + let mut timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + + let end = Instant::now(); + timing.end = Some(end); + profiler::add_task_timing(timing); + } + }), + ); + + async_task::Builder::new() + .metadata(RunnableMeta { location }) + .spawn( + move |_| future, + move |runnable| { + let _ = tx.send(runnable); + }, + ) + } else { + let location = core::panic::Location::caller(); + async_task::Builder::new() + .metadata(RunnableMeta { location }) + .spawn( + move |_| future, + move |runnable| { + dispatcher.dispatch(RunnableVariant::Meta(runnable), label, priority) + }, + ) + }; + runnable.schedule(); Task(TaskState::Spawned(task)) } @@ -281,7 +456,11 @@ impl BackgroundExecutor { }); let mut cx = std::task::Context::from_waker(&waker); - let duration = Duration::from_secs(180); + let duration = Duration::from_secs( + option_env!("GPUI_TEST_TIMEOUT") + .and_then(|s| s.parse::().ok()) + .unwrap_or(180), + ); let mut test_should_end_by = Instant::now() + duration; loop { @@ -315,10 +494,8 @@ impl BackgroundExecutor { "parked with nothing left to run{waiting_message}{backtrace_message}", ) } - dispatcher.set_unparker(unparker.clone()); - parker.park_timeout( - test_should_end_by.saturating_duration_since(Instant::now()), - ); + 
dispatcher.push_unparker(unparker.clone()); + parker.park_timeout(Duration::from_millis(1)); if Instant::now() > test_should_end_by { panic!("test timed out after {duration:?} with allow_parking") } @@ -344,11 +521,28 @@ impl BackgroundExecutor { where F: FnOnce(&mut Scope<'scope>), { - let mut scope = Scope::new(self.clone()); + let mut scope = Scope::new(self.clone(), Priority::default()); + (scheduler)(&mut scope); + let spawned = mem::take(&mut scope.futures) + .into_iter() + .map(|f| self.spawn_with_priority(scope.priority, f)) + .collect::>(); + for task in spawned { + task.await; + } + } + + /// Scoped lets you start a number of tasks and waits + /// for all of them to complete before returning. + pub async fn scoped_priority<'scope, F>(&self, priority: Priority, scheduler: F) + where + F: FnOnce(&mut Scope<'scope>), + { + let mut scope = Scope::new(self.clone(), priority); (scheduler)(&mut scope); let spawned = mem::take(&mut scope.futures) .into_iter() - .map(|f| self.spawn(f)) + .map(|f| self.spawn_with_priority(scope.priority, f)) .collect::>(); for task in spawned { task.await; @@ -370,10 +564,13 @@ impl BackgroundExecutor { if duration.is_zero() { return Task::ready(()); } - let (runnable, task) = async_task::spawn(async move {}, { - let dispatcher = self.dispatcher.clone(); - move |runnable| dispatcher.dispatch_after(duration, runnable) - }); + let location = core::panic::Location::caller(); + let (runnable, task) = async_task::Builder::new() + .metadata(RunnableMeta { location }) + .spawn(move |_| async move {}, { + let dispatcher = self.dispatcher.clone(); + move |runnable| dispatcher.dispatch_after(duration, RunnableVariant::Meta(runnable)) + }); runnable.schedule(); Task(TaskState::Spawned(task)) } @@ -479,24 +676,45 @@ impl ForegroundExecutor { } /// Enqueues the given Task to run on the main thread at some point in the future. + #[track_caller] pub fn spawn(&self, future: impl Future + 'static) -> Task + where + R: 'static, + { + self.spawn_with_priority(Priority::default(), future) + } + + /// Enqueues the given Task to run on the main thread at some point in the future. 
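A hedged sketch of the new priority-aware spawning on `BackgroundExecutor` (the workloads are illustrative; per the hunk above, Windows currently falls back to `Medium` for everything):

```rust
use gpui::{BackgroundExecutor, Priority, Task};

fn spawn_bulk_work(executor: &BackgroundExecutor) -> Task<()> {
    // Low priority keeps bulk background work from starving latency-sensitive tasks.
    executor.spawn_with_priority(Priority::Low, async move {
        // ... long-running indexing work (illustrative) ...
    })
}

async fn process_chunks(executor: &BackgroundExecutor, chunks: &[Vec<u8>]) {
    // Scoped variant added in the same hunk: every task in the scope shares one
    // priority and may borrow from the enclosing stack frame.
    executor
        .scoped_priority(Priority::Low, |scope| {
            for chunk in chunks {
                scope.spawn(async move {
                    let _checksum: usize = chunk.iter().map(|byte| *byte as usize).sum();
                });
            }
        })
        .await;
}
```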
+ #[track_caller] + pub fn spawn_with_priority( + &self, + priority: Priority, + future: impl Future + 'static, + ) -> Task where R: 'static, { let dispatcher = self.dispatcher.clone(); + let location = core::panic::Location::caller(); #[track_caller] fn inner( dispatcher: Arc, future: AnyLocalFuture, + location: &'static core::panic::Location<'static>, + priority: Priority, ) -> Task { - let (runnable, task) = spawn_local_with_source_location(future, move |runnable| { - dispatcher.dispatch_on_main_thread(runnable) - }); + let (runnable, task) = spawn_local_with_source_location( + future, + move |runnable| { + dispatcher.dispatch_on_main_thread(RunnableVariant::Meta(runnable), priority) + }, + RunnableMeta { location }, + ); runnable.schedule(); Task(TaskState::Spawned(task)) } - inner::(dispatcher, Box::pin(future)) + inner::(dispatcher, Box::pin(future), location, priority) } } @@ -505,14 +723,16 @@ impl ForegroundExecutor { /// Copy-modified from: /// #[track_caller] -fn spawn_local_with_source_location( +fn spawn_local_with_source_location( future: Fut, schedule: S, -) -> (Runnable<()>, async_task::Task) + metadata: M, +) -> (Runnable, async_task::Task) where Fut: Future + 'static, Fut::Output: 'static, - S: async_task::Schedule<()> + Send + Sync + 'static, + S: async_task::Schedule + Send + Sync + 'static, + M: 'static, { #[inline] fn thread_id() -> ThreadId { @@ -560,12 +780,17 @@ where location: Location::caller(), }; - unsafe { async_task::spawn_unchecked(future, schedule) } + unsafe { + async_task::Builder::new() + .metadata(metadata) + .spawn_unchecked(move |_| future, schedule) + } } /// Scope manages a set of tasks that are enqueued and waited on together. See [`BackgroundExecutor::scoped`]. pub struct Scope<'a> { executor: BackgroundExecutor, + priority: Priority, futures: Vec + Send + 'static>>>, tx: Option>, rx: mpsc::Receiver<()>, @@ -573,10 +798,11 @@ pub struct Scope<'a> { } impl<'a> Scope<'a> { - fn new(executor: BackgroundExecutor) -> Self { + fn new(executor: BackgroundExecutor, priority: Priority) -> Self { let (tx, rx) = mpsc::channel(1); Self { executor, + priority, tx: Some(tx), rx, futures: Default::default(), @@ -590,6 +816,7 @@ impl<'a> Scope<'a> { } /// Spawn a future into this scope. + #[track_caller] pub fn spawn(&mut self, f: F) where F: Future + Send + 'a, diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index fa6f90b9ac9949ed7b5444e13045aaef6f9c0224..fc735ba5e0e7e719ed12b6b1b168ec3ee49e22bb 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -748,7 +748,7 @@ impl Size { /// assert_eq!(bounds.origin, origin); /// assert_eq!(bounds.size, size); /// ``` -#[derive(Refineable, Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] +#[derive(Refineable, Copy, Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)] #[refineable(Debug)] #[repr(C)] pub struct Bounds { @@ -1416,9 +1416,9 @@ where /// ``` pub fn contains(&self, point: &Point) -> bool { point.x >= self.origin.x - && point.x <= self.origin.x.clone() + self.size.width.clone() + && point.x < self.origin.x.clone() + self.size.width.clone() && point.y >= self.origin.y - && point.y <= self.origin.y.clone() + self.size.height.clone() + && point.y < self.origin.y.clone() + self.size.height.clone() } /// Checks if this bounds is completely contained within another bounds. @@ -1676,8 +1676,6 @@ impl Bounds { } } -impl Copy for Bounds {} - /// Represents the edges of a box in a 2D space, such as padding or margin. 
/// /// Each field represents the size of the edge on one side of the box: `top`, `right`, `bottom`, and `left`. @@ -2650,6 +2648,18 @@ impl Debug for Pixels { } } +impl std::iter::Sum for Pixels { + fn sum>(iter: I) -> Self { + iter.fold(Self::ZERO, |a, b| a + b) + } +} + +impl<'a> std::iter::Sum<&'a Pixels> for Pixels { + fn sum>(iter: I) -> Self { + iter.fold(Self::ZERO, |a, b| a + *b) + } +} + impl TryFrom<&'_ str> for Pixels { type Error = anyhow::Error; @@ -3569,7 +3579,7 @@ pub const fn relative(fraction: f32) -> DefiniteLength { } /// Returns the Golden Ratio, i.e. `~(1.0 + sqrt(5.0)) / 2.0`. -pub fn phi() -> DefiniteLength { +pub const fn phi() -> DefiniteLength { relative(1.618_034) } @@ -3582,7 +3592,7 @@ pub fn phi() -> DefiniteLength { /// # Returns /// /// A `Rems` representing the specified number of rems. -pub fn rems(rems: f32) -> Rems { +pub const fn rems(rems: f32) -> Rems { Rems(rems) } @@ -3610,7 +3620,7 @@ pub const fn px(pixels: f32) -> Pixels { /// # Returns /// /// A `Length` variant set to `Auto`. -pub fn auto() -> Length { +pub const fn auto() -> Length { Length::Auto } diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 098c0780b2cc52c4dbfff4f65c8b59277fd9fa84..76a61e286d3fe6c1acae8e4e628d4c9130f1305f 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -30,6 +30,9 @@ mod keymap; mod path_builder; mod platform; pub mod prelude; +mod profiler; +#[cfg(target_os = "linux")] +mod queue; mod scene; mod shared_string; mod shared_uri; @@ -87,16 +90,21 @@ use key_dispatch::*; pub use keymap::*; pub use path_builder::*; pub use platform::*; +pub use profiler::*; +#[cfg(target_os = "linux")] +pub(crate) use queue::{PriorityQueueReceiver, PriorityQueueSender}; pub use refineable::*; pub use scene::*; pub use shared_string::*; pub use shared_uri::*; pub use smol::Timer; +use std::{any::Any, future::Future}; pub use style::*; pub use styled::*; pub use subscription::*; pub use svg_renderer::*; pub(crate) use tab_stop::*; +use taffy::TaffyLayoutEngine; pub use taffy::{AvailableSpace, LayoutId}; #[cfg(any(test, feature = "test-support"))] pub use test::*; @@ -107,9 +115,6 @@ pub use util::{FutureExt, Timeout, arc_cow::ArcCow}; pub use view::*; pub use window::*; -use std::{any::Any, future::Future}; -use taffy::TaffyLayoutEngine; - /// The context trait, allows the different contexts in GPUI to be used /// interchangeably for certain operations. pub trait AppContext { diff --git a/crates/gpui/src/interactive.rs b/crates/gpui/src/interactive.rs index 8659433278d13c84a11312127639f700bfbb9cdc..a500ac46f0bbf96fc2b9d326a3a61da42c40b7ec 100644 --- a/crates/gpui/src/interactive.rs +++ b/crates/gpui/src/interactive.rs @@ -174,6 +174,40 @@ pub struct MouseClickEvent { pub up: MouseUpEvent, } +/// The stage of a pressure click event. +#[derive(Clone, Copy, Debug, Default, PartialEq)] +pub enum PressureStage { + /// No pressure. + #[default] + Zero, + /// Normal click pressure. + Normal, + /// High pressure, enough to trigger a force click. + Force, +} + +/// A mouse pressure event from the platform. Generated when a force-sensitive trackpad is pressed hard. +/// Currently only implemented for macOS trackpads. +#[derive(Debug, Clone, Default)] +pub struct MousePressureEvent { + /// Pressure of the current stage as a float between 0 and 1 + pub pressure: f32, + /// The pressure stage of the event. + pub stage: PressureStage, + /// The position of the mouse on the window. 
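A hedged sketch pinning down two behavioral changes from the `geometry.rs` hunk above: `Bounds::contains` is now half-open on the max edges, and `Pixels` gains `Sum` impls:

```rust
use gpui::{Bounds, Pixels, point, px, size};

fn geometry_changes() {
    let bounds: Bounds<Pixels> = Bounds::new(point(px(0.), px(0.)), size(px(10.), px(10.)));
    assert!(bounds.contains(&point(px(0.), px(0.))));   // min edges stay inclusive
    assert!(!bounds.contains(&point(px(10.), px(5.)))); // max edges are now exclusive

    // New `Sum` impls: iterator totals no longer need an explicit fold.
    let total: Pixels = [px(1.), px(2.), px(3.)].iter().sum();
    assert_eq!(total, px(6.));
}
```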
+ pub position: Point, + /// The modifiers that were held down when the mouse pressure changed. + pub modifiers: Modifiers, +} + +impl Sealed for MousePressureEvent {} +impl InputEvent for MousePressureEvent { + fn to_platform_input(self) -> PlatformInput { + PlatformInput::MousePressure(self) + } +} +impl MouseEvent for MousePressureEvent {} + /// A click event that was generated by a keyboard button being pressed and released. #[derive(Clone, Debug, Default)] pub struct KeyboardClickEvent { @@ -305,9 +339,10 @@ pub enum KeyboardButton { } /// An enum representing the mouse button that was pressed. -#[derive(Hash, PartialEq, Eq, Copy, Clone, Debug)] +#[derive(Hash, Default, PartialEq, Eq, Copy, Clone, Debug)] pub enum MouseButton { /// The left mouse button. + #[default] Left, /// The right mouse button. @@ -333,28 +368,17 @@ impl MouseButton { } } -impl Default for MouseButton { - fn default() -> Self { - Self::Left - } -} - /// A navigation direction, such as back or forward. -#[derive(Hash, PartialEq, Eq, Copy, Clone, Debug)] +#[derive(Hash, Default, PartialEq, Eq, Copy, Clone, Debug)] pub enum NavigationDirection { /// The back button. + #[default] Back, /// The forward button. Forward, } -impl Default for NavigationDirection { - fn default() -> Self { - Self::Back - } -} - /// A mouse move event from the platform. #[derive(Clone, Debug, Default)] pub struct MouseMoveEvent { @@ -519,7 +543,7 @@ impl Deref for MouseExitEvent { } /// A collection of paths from the platform, such as from a file drop. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Eq, PartialEq)] pub struct ExternalPaths(pub(crate) SmallVec<[PathBuf; 2]>); impl ExternalPaths { @@ -581,6 +605,8 @@ pub enum PlatformInput { MouseDown(MouseDownEvent), /// The mouse was released. MouseUp(MouseUpEvent), + /// Mouse pressure. + MousePressure(MousePressureEvent), /// The mouse was moved. MouseMove(MouseMoveEvent), /// The mouse exited the window. 
@@ -600,6 +626,7 @@ impl PlatformInput { PlatformInput::MouseDown(event) => Some(event), PlatformInput::MouseUp(event) => Some(event), PlatformInput::MouseMove(event) => Some(event), + PlatformInput::MousePressure(event) => Some(event), PlatformInput::MouseExited(event) => Some(event), PlatformInput::ScrollWheel(event) => Some(event), PlatformInput::FileDrop(event) => Some(event), @@ -614,6 +641,7 @@ impl PlatformInput { PlatformInput::MouseDown(_) => None, PlatformInput::MouseUp(_) => None, PlatformInput::MouseMove(_) => None, + PlatformInput::MousePressure(_) => None, PlatformInput::MouseExited(_) => None, PlatformInput::ScrollWheel(_) => None, PlatformInput::FileDrop(_) => None, @@ -677,8 +705,8 @@ mod test { }); window - .update(cx, |test_view, window, _cx| { - window.focus(&test_view.focus_handle) + .update(cx, |test_view, window, cx| { + window.focus(&test_view.focus_handle, cx) }) .unwrap(); diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index f0c857abd6f3c353105b4272b51ca519f1906078..1b92b9fe3ffabdbeec4bc7450adc1439e8e223eb 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -121,6 +121,7 @@ pub(crate) struct Replay { #[derive(Default, Debug)] pub(crate) struct DispatchResult { pub(crate) pending: SmallVec<[Keystroke; 1]>, + pub(crate) pending_has_binding: bool, pub(crate) bindings: SmallVec<[KeyBinding; 1]>, pub(crate) to_replay: SmallVec<[Replay; 1]>, pub(crate) context_stack: Vec, @@ -461,6 +462,17 @@ impl DispatchTree { (bindings, partial, context_stack) } + /// Find the bindings that can follow the current input sequence. + pub fn possible_next_bindings_for_input( + &self, + input: &[Keystroke], + context_stack: &[KeyContext], + ) -> Vec { + self.keymap + .borrow() + .possible_next_bindings_for_input(input, context_stack) + } + /// dispatch_key processes the keystroke /// input should be set to the value of `pending` from the previous call to dispatch_key. 
/// This returns three instructions to the input handler: @@ -480,6 +492,7 @@ impl DispatchTree { if pending { return DispatchResult { pending: input, + pending_has_binding: !bindings.is_empty(), context_stack, ..Default::default() }; @@ -608,15 +621,17 @@ impl DispatchTree { #[cfg(test)] mod tests { use crate::{ - self as gpui, Element, ElementId, GlobalElementId, InspectorElementId, LayoutId, Style, + self as gpui, AppContext, DispatchResult, Element, ElementId, GlobalElementId, + InspectorElementId, Keystroke, LayoutId, Style, }; use core::panic; + use smallvec::SmallVec; use std::{cell::RefCell, ops::Range, rc::Rc}; use crate::{ Action, ActionRegistry, App, Bounds, Context, DispatchTree, FocusHandle, InputHandler, - IntoElement, KeyBinding, KeyContext, Keymap, Pixels, Point, Render, TestAppContext, - UTF16Selection, Window, + IntoElement, KeyBinding, KeyContext, Keymap, Pixels, Point, Render, Subscription, + TestAppContext, UTF16Selection, Window, }; #[derive(PartialEq, Eq)] @@ -676,6 +691,256 @@ mod tests { assert!(keybinding[0].action.partial_eq(&TestAction)) } + #[test] + fn test_pending_has_binding_state() { + let bindings = vec![ + KeyBinding::new("ctrl-b h", TestAction, None), + KeyBinding::new("space", TestAction, Some("ContextA")), + KeyBinding::new("space f g", TestAction, Some("ContextB")), + ]; + let keymap = Rc::new(RefCell::new(Keymap::new(bindings))); + let mut registry = ActionRegistry::default(); + registry.load_action::(); + let mut tree = DispatchTree::new(keymap, Rc::new(registry)); + + type DispatchPath = SmallVec<[super::DispatchNodeId; 32]>; + fn dispatch( + tree: &mut DispatchTree, + pending: SmallVec<[Keystroke; 1]>, + key: &str, + path: &DispatchPath, + ) -> DispatchResult { + tree.dispatch_key(pending, Keystroke::parse(key).unwrap(), path) + } + + let dispatch_path: DispatchPath = SmallVec::new(); + let result = dispatch(&mut tree, SmallVec::new(), "ctrl-b", &dispatch_path); + assert_eq!(result.pending.len(), 1); + assert!(!result.pending_has_binding); + + let result = dispatch(&mut tree, result.pending, "h", &dispatch_path); + assert_eq!(result.pending.len(), 0); + assert_eq!(result.bindings.len(), 1); + assert!(!result.pending_has_binding); + + let node_id = tree.push_node(); + tree.set_key_context(KeyContext::parse("ContextB").unwrap()); + tree.pop_node(); + + let dispatch_path = tree.dispatch_path(node_id); + let result = dispatch(&mut tree, SmallVec::new(), "space", &dispatch_path); + + assert_eq!(result.pending.len(), 1); + assert!(!result.pending_has_binding); + } + + #[crate::test] + fn test_pending_input_observers_notified_on_focus_change(cx: &mut TestAppContext) { + #[derive(Clone)] + struct CustomElement { + focus_handle: FocusHandle, + text: Rc>, + } + + impl CustomElement { + fn new(cx: &mut Context) -> Self { + Self { + focus_handle: cx.focus_handle(), + text: Rc::default(), + } + } + } + + impl Element for CustomElement { + type RequestLayoutState = (); + + type PrepaintState = (); + + fn id(&self) -> Option { + Some("custom".into()) + } + + fn source_location(&self) -> Option<&'static panic::Location<'static>> { + None + } + + fn request_layout( + &mut self, + _: Option<&GlobalElementId>, + _: Option<&InspectorElementId>, + window: &mut Window, + cx: &mut App, + ) -> (LayoutId, Self::RequestLayoutState) { + (window.request_layout(Style::default(), [], cx), ()) + } + + fn prepaint( + &mut self, + _: Option<&GlobalElementId>, + _: Option<&InspectorElementId>, + _: Bounds, + _: &mut Self::RequestLayoutState, + window: &mut Window, + cx: &mut App, 
+ ) -> Self::PrepaintState { + window.set_focus_handle(&self.focus_handle, cx); + } + + fn paint( + &mut self, + _: Option<&GlobalElementId>, + _: Option<&InspectorElementId>, + _: Bounds, + _: &mut Self::RequestLayoutState, + _: &mut Self::PrepaintState, + window: &mut Window, + cx: &mut App, + ) { + let mut key_context = KeyContext::default(); + key_context.add("Terminal"); + window.set_key_context(key_context); + window.handle_input(&self.focus_handle, self.clone(), cx); + window.on_action(std::any::TypeId::of::(), |_, _, _, _| {}); + } + } + + impl IntoElement for CustomElement { + type Element = Self; + + fn into_element(self) -> Self::Element { + self + } + } + + impl InputHandler for CustomElement { + fn selected_text_range( + &mut self, + _: bool, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + + fn marked_text_range(&mut self, _: &mut Window, _: &mut App) -> Option> { + None + } + + fn text_for_range( + &mut self, + _: Range, + _: &mut Option>, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + + fn replace_text_in_range( + &mut self, + replacement_range: Option>, + text: &str, + _: &mut Window, + _: &mut App, + ) { + if replacement_range.is_some() { + unimplemented!() + } + self.text.borrow_mut().push_str(text) + } + + fn replace_and_mark_text_in_range( + &mut self, + replacement_range: Option>, + new_text: &str, + _: Option>, + _: &mut Window, + _: &mut App, + ) { + if replacement_range.is_some() { + unimplemented!() + } + self.text.borrow_mut().push_str(new_text) + } + + fn unmark_text(&mut self, _: &mut Window, _: &mut App) {} + + fn bounds_for_range( + &mut self, + _: Range, + _: &mut Window, + _: &mut App, + ) -> Option> { + None + } + + fn character_index_for_point( + &mut self, + _: Point, + _: &mut Window, + _: &mut App, + ) -> Option { + None + } + } + + impl Render for CustomElement { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + self.clone() + } + } + + cx.update(|cx| { + cx.bind_keys([KeyBinding::new("ctrl-b", TestAction, Some("Terminal"))]); + cx.bind_keys([KeyBinding::new("ctrl-b h", TestAction, Some("Terminal"))]); + }); + + let (test, cx) = cx.add_window_view(|_, cx| CustomElement::new(cx)); + let focus_handle = test.update(cx, |test, _| test.focus_handle.clone()); + + let pending_input_changed_count = Rc::new(RefCell::new(0usize)); + let pending_input_changed_count_for_observer = pending_input_changed_count.clone(); + + struct PendingInputObserver { + _subscription: Subscription, + } + + let _observer = cx.update(|window, cx| { + cx.new(|cx| PendingInputObserver { + _subscription: cx.observe_pending_input(window, move |_, _, _| { + *pending_input_changed_count_for_observer.borrow_mut() += 1; + }), + }) + }); + + cx.update(|window, cx| { + window.focus(&focus_handle, cx); + window.activate_window(); + }); + + cx.simulate_keystrokes("ctrl-b"); + + let count_after_pending = Rc::new(RefCell::new(0usize)); + let count_after_pending_for_assertion = count_after_pending.clone(); + + cx.update(|window, cx| { + assert!(window.has_pending_keystrokes()); + *count_after_pending.borrow_mut() = *pending_input_changed_count.borrow(); + assert!(*count_after_pending.borrow() > 0); + + window.focus(&cx.focus_handle(), cx); + + assert!(!window.has_pending_keystrokes()); + }); + + // Focus-triggered pending-input notifications are deferred to the end of the current + // effect cycle, so the observer callback should run after the focus update completes. 
+ cx.update(|_, _| { + let count_after_focus_change = *pending_input_changed_count.borrow(); + assert!(count_after_focus_change > *count_after_pending_for_assertion.borrow()); + }); + } + #[crate::test] fn test_input_handler_pending(cx: &mut TestAppContext) { #[derive(Clone)] @@ -829,8 +1094,9 @@ mod tests { cx.bind_keys([KeyBinding::new("ctrl-b h", TestAction, Some("Terminal"))]); }); let (test, cx) = cx.add_window_view(|_, cx| CustomElement::new(cx)); + let focus_handle = test.update(cx, |test, _| test.focus_handle.clone()); cx.update(|window, cx| { - window.focus(&test.read(cx).focus_handle); + window.focus(&focus_handle, cx); window.activate_window(); }); cx.simulate_keystrokes("ctrl-b ["); diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index 33d956917055942cce365e9069cbb007e202eaf2..d5398ff0447849ca5bfcdbbb5a838af0cbc22836 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -215,6 +215,41 @@ impl Keymap { Some(contexts.len()) } } + + /// Find the bindings that can follow the current input sequence. + pub fn possible_next_bindings_for_input( + &self, + input: &[Keystroke], + context_stack: &[KeyContext], + ) -> Vec { + let mut bindings = self + .bindings() + .enumerate() + .rev() + .filter_map(|(ix, binding)| { + let depth = self.binding_enabled(binding, context_stack)?; + let pending = binding.match_keystrokes(input); + match pending { + None => None, + Some(is_pending) => { + if !is_pending || is_no_action(&*binding.action) { + return None; + } + Some((depth, BindingIndex(ix), binding)) + } + } + }) + .collect::>(); + + bindings.sort_by(|(depth_a, ix_a, _), (depth_b, ix_b, _)| { + depth_b.cmp(depth_a).then(ix_b.cmp(ix_a)) + }); + + bindings + .into_iter() + .map(|(_, _, binding)| binding.clone()) + .collect::>() + } } #[cfg(test)] diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index decdc547353f9290b710b337c7dd99cdae188918..f120e075fea7f9336e2f6e10c51611d8ba03564d 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -39,9 +39,10 @@ use crate::{ Action, AnyWindowHandle, App, AsyncWindowContext, BackgroundExecutor, Bounds, DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, - Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph, - ShapedRun, SharedString, Size, SvgRenderer, SystemWindowTab, Task, TaskLabel, Window, - WindowControlArea, hash, point, px, size, + Point, Priority, RealtimePriority, RenderGlyphParams, RenderImage, RenderImageParams, + RenderSvgParams, Scene, ShapedGlyph, ShapedRun, SharedString, Size, SvgRenderer, + SystemWindowTab, Task, TaskLabel, TaskTiming, ThreadTaskTimings, Window, WindowControlArea, + hash, point, px, size, }; use anyhow::Result; use async_task::Runnable; @@ -76,7 +77,6 @@ pub use keystroke::*; pub(crate) use linux::*; #[cfg(target_os = "macos")] pub(crate) use mac::*; -pub use semantic_version::SemanticVersion; #[cfg(any(test, feature = "test-support"))] pub(crate) use test::*; #[cfg(target_os = "windows")] @@ -290,6 +290,13 @@ pub trait PlatformDisplay: Send + Sync + Debug { /// Get the bounds for this display fn bounds(&self) -> Bounds; + /// Get the visible bounds for this display, excluding taskbar/dock areas. + /// This is the usable area where windows can be placed without being obscured. + /// Defaults to the full display bounds if not overridden. 
+ fn visible_bounds(&self) -> Bounds { + self.bounds() + } + /// Get the default bounds for this display to place a window fn default_bounds(&self) -> Bounds { let bounds = self.bounds(); @@ -559,14 +566,33 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { } } +/// This type is public so that our test macro can generate and use it, but it should not +/// be considered part of our public API. +#[doc(hidden)] +#[derive(Debug)] +pub struct RunnableMeta { + /// Location of the runnable + pub location: &'static core::panic::Location<'static>, +} + +#[doc(hidden)] +pub enum RunnableVariant { + Meta(Runnable), + Compat(Runnable), +} + /// This type is public so that our test macro can generate and use it, but it should not /// be considered part of our public API. #[doc(hidden)] pub trait PlatformDispatcher: Send + Sync { + fn get_all_timings(&self) -> Vec; + fn get_current_thread_timings(&self) -> Vec; fn is_main_thread(&self) -> bool; - fn dispatch(&self, runnable: Runnable, label: Option); - fn dispatch_on_main_thread(&self, runnable: Runnable); - fn dispatch_after(&self, duration: Duration, runnable: Runnable); + fn dispatch(&self, runnable: RunnableVariant, label: Option, priority: Priority); + fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority); + fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant); + fn spawn_realtime(&self, priority: RealtimePriority, f: Box); + fn now(&self) -> Instant { Instant::now() } @@ -1328,11 +1354,12 @@ pub enum WindowKind { /// /// On macOS, this corresponds to named [`NSAppearance`](https://developer.apple.com/documentation/appkit/nsappearance) /// values. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum WindowAppearance { /// A light appearance. /// /// On macOS, this corresponds to the `aqua` appearance. + #[default] Light, /// A light appearance with vibrant colors. @@ -1351,12 +1378,6 @@ pub enum WindowAppearance { VibrantDark, } -impl Default for WindowAppearance { - fn default() -> Self { - Self::Light - } -} - /// The appearance of the background of the window itself, when there is /// no content or the content is transparent. #[derive(Copy, Clone, Debug, Default, PartialEq)] @@ -1376,6 +1397,10 @@ pub enum WindowBackgroundAppearance { /// /// Not always supported. Blurred, + /// The Mica backdrop material, supported on Windows 11. + MicaBackdrop, + /// The Mica Alt backdrop material, supported on Windows 11. + MicaAltBackdrop, } /// The options that can be configured for a file dialog prompt @@ -1457,9 +1482,10 @@ impl From<&str> for PromptButton { } /// The style of the cursor (pointer) -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub enum CursorStyle { /// The default cursor + #[default] Arrow, /// A text input cursor @@ -1546,12 +1572,6 @@ pub enum CursorStyle { None, } -impl Default for CursorStyle { - fn default() -> Self { - Self::Arrow - } -} - /// A clipboard item that should be copied to the clipboard #[derive(Clone, Debug, Eq, PartialEq)] pub struct ClipboardItem { @@ -1565,6 +1585,8 @@ pub enum ClipboardEntry { String(ClipboardString), /// An image entry Image(Image), + /// A file entry + ExternalPaths(crate::ExternalPaths), } impl ClipboardItem { @@ -1605,16 +1627,29 @@ impl ClipboardItem { /// Returns None if there were no ClipboardString entries. 
pub fn text(&self) -> Option { let mut answer = String::new(); - let mut any_entries = false; for entry in self.entries.iter() { if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry { answer.push_str(text); - any_entries = true; } } - if any_entries { Some(answer) } else { None } + if answer.is_empty() { + for entry in self.entries.iter() { + if let ClipboardEntry::ExternalPaths(paths) = entry { + for path in &paths.0 { + use std::fmt::Write as _; + _ = write!(answer, "{}", path.display()); + } + } + } + } + + if !answer.is_empty() { + Some(answer) + } else { + None + } } /// If this item is one ClipboardEntry::String, returns its metadata. diff --git a/crates/gpui/src/platform/linux/dispatcher.rs b/crates/gpui/src/platform/linux/dispatcher.rs index 9ca1f76fd6996ffbd376d8254cbbe63a1c8d8fd0..c8ae7269edd495669baa6ab0e22e745917f143b2 100644 --- a/crates/gpui/src/platform/linux/dispatcher.rs +++ b/crates/gpui/src/platform/linux/dispatcher.rs @@ -1,46 +1,84 @@ -use crate::{PlatformDispatcher, TaskLabel}; -use async_task::Runnable; use calloop::{ - EventLoop, + EventLoop, PostAction, channel::{self, Sender}, timer::TimeoutAction, }; +use util::ResultExt; + use std::{ + mem::MaybeUninit, thread, time::{Duration, Instant}, }; -use util::ResultExt; + +use crate::{ + GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, PriorityQueueReceiver, + PriorityQueueSender, RealtimePriority, RunnableVariant, THREAD_TIMINGS, TaskLabel, TaskTiming, + ThreadTaskTimings, profiler, +}; struct TimerAfter { duration: Duration, - runnable: Runnable, + runnable: RunnableVariant, } pub(crate) struct LinuxDispatcher { - main_sender: Sender, + main_sender: PriorityQueueCalloopSender, timer_sender: Sender, - background_sender: flume::Sender, + background_sender: PriorityQueueSender, _background_threads: Vec>, main_thread_id: thread::ThreadId, } +const MIN_THREADS: usize = 2; + impl LinuxDispatcher { - pub fn new(main_sender: Sender) -> Self { - let (background_sender, background_receiver) = flume::unbounded::(); - let thread_count = std::thread::available_parallelism() - .map(|i| i.get()) - .unwrap_or(1); + pub fn new(main_sender: PriorityQueueCalloopSender) -> Self { + let (background_sender, background_receiver) = PriorityQueueReceiver::new(); + let thread_count = + std::thread::available_parallelism().map_or(MIN_THREADS, |i| i.get().max(MIN_THREADS)); + // These thread should really be lower prio then the foreground + // executor let mut background_threads = (0..thread_count) .map(|i| { - let receiver = background_receiver.clone(); + let mut receiver = background_receiver.clone(); std::thread::Builder::new() .name(format!("Worker-{i}")) .spawn(move || { - for runnable in receiver { + for runnable in receiver.iter() { let start = Instant::now(); - runnable.run(); + let mut location = match runnable { + RunnableVariant::Meta(runnable) => { + let location = runnable.metadata().location; + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + RunnableVariant::Compat(runnable) => { + let location = core::panic::Location::caller(); + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + }; + + let end = Instant::now(); + location.end = Some(end); + profiler::add_task_timing(location); log::trace!( "background thread {}: ran runnable. 
took: {:?}", @@ -72,7 +110,36 @@ impl LinuxDispatcher { calloop::timer::Timer::from_duration(timer.duration), move |_, _, _| { if let Some(runnable) = runnable.take() { - runnable.run(); + let start = Instant::now(); + let mut timing = match runnable { + RunnableVariant::Meta(runnable) => { + let location = runnable.metadata().location; + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + RunnableVariant::Compat(runnable) => { + let timing = TaskTiming { + location: core::panic::Location::caller(), + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + }; + let end = Instant::now(); + + timing.end = Some(end); + profiler::add_task_timing(timing); } TimeoutAction::Drop }, @@ -99,31 +166,305 @@ impl LinuxDispatcher { } impl PlatformDispatcher for LinuxDispatcher { + fn get_all_timings(&self) -> Vec { + let global_timings = GLOBAL_THREAD_TIMINGS.lock(); + ThreadTaskTimings::convert(&global_timings) + } + + fn get_current_thread_timings(&self) -> Vec { + THREAD_TIMINGS.with(|timings| { + let timings = timings.lock(); + let timings = &timings.timings; + + let mut vec = Vec::with_capacity(timings.len()); + + let (s1, s2) = timings.as_slices(); + vec.extend_from_slice(s1); + vec.extend_from_slice(s2); + vec + }) + } + fn is_main_thread(&self) -> bool { thread::current().id() == self.main_thread_id } - fn dispatch(&self, runnable: Runnable, _: Option) { - self.background_sender.send(runnable).unwrap(); + fn dispatch(&self, runnable: RunnableVariant, _: Option, priority: Priority) { + self.background_sender + .send(priority, runnable) + .unwrap_or_else(|_| panic!("blocking sender returned without value")); } - fn dispatch_on_main_thread(&self, runnable: Runnable) { - self.main_sender.send(runnable).unwrap_or_else(|runnable| { - // NOTE: Runnable may wrap a Future that is !Send. - // - // This is usually safe because we only poll it on the main thread. - // However if the send fails, we know that: - // 1. main_receiver has been dropped (which implies the app is shutting down) - // 2. we are on a background thread. - // It is not safe to drop something !Send on the wrong thread, and - // the app will exit soon anyway, so we must forget the runnable. - std::mem::forget(runnable); - }); + fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority) { + self.main_sender + .send(priority, runnable) + .unwrap_or_else(|runnable| { + // NOTE: Runnable may wrap a Future that is !Send. + // + // This is usually safe because we only poll it on the main thread. + // However if the send fails, we know that: + // 1. main_receiver has been dropped (which implies the app is shutting down) + // 2. we are on a background thread. + // It is not safe to drop something !Send on the wrong thread, and + // the app will exit soon anyway, so we must forget the runnable. 
+ std::mem::forget(runnable); + }); } - fn dispatch_after(&self, duration: Duration, runnable: Runnable) { + fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { self.timer_sender .send(TimerAfter { duration, runnable }) .ok(); } + + fn spawn_realtime(&self, priority: RealtimePriority, f: Box) { + std::thread::spawn(move || { + // SAFETY: always safe to call + let thread_id = unsafe { libc::pthread_self() }; + + let policy = match priority { + RealtimePriority::Audio => libc::SCHED_FIFO, + RealtimePriority::Other => libc::SCHED_RR, + }; + let sched_priority = match priority { + RealtimePriority::Audio => 65, + RealtimePriority::Other => 45, + }; + + // SAFETY: all sched_param members are valid when initialized to zero. + let mut sched_param = + unsafe { MaybeUninit::::zeroed().assume_init() }; + sched_param.sched_priority = sched_priority; + // SAFETY: sched_param is a valid initialized structure + let result = unsafe { libc::pthread_setschedparam(thread_id, policy, &sched_param) }; + if result != 0 { + log::warn!("failed to set realtime thread priority to {:?}", priority); + } + + f(); + }); + } +} + +pub struct PriorityQueueCalloopSender { + sender: PriorityQueueSender, + ping: calloop::ping::Ping, +} + +impl PriorityQueueCalloopSender { + fn new(tx: PriorityQueueSender, ping: calloop::ping::Ping) -> Self { + Self { sender: tx, ping } + } + + fn send(&self, priority: Priority, item: T) -> Result<(), crate::queue::SendError> { + let res = self.sender.send(priority, item); + if res.is_ok() { + self.ping.ping(); + } + res + } +} + +impl Drop for PriorityQueueCalloopSender { + fn drop(&mut self) { + self.ping.ping(); + } +} + +pub struct PriorityQueueCalloopReceiver { + receiver: PriorityQueueReceiver, + source: calloop::ping::PingSource, + ping: calloop::ping::Ping, +} + +impl PriorityQueueCalloopReceiver { + pub fn new() -> (PriorityQueueCalloopSender, Self) { + let (ping, source) = calloop::ping::make_ping().expect("Failed to create a Ping."); + + let (tx, rx) = PriorityQueueReceiver::new(); + + ( + PriorityQueueCalloopSender::new(tx, ping.clone()), + Self { + receiver: rx, + source, + ping, + }, + ) + } +} + +use calloop::channel::Event; + +#[derive(Debug)] +pub struct ChannelError(calloop::ping::PingError); + +impl std::fmt::Display for ChannelError { + #[cfg_attr(feature = "nightly_coverage", coverage(off))] + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(&self.0, f) + } } + +impl std::error::Error for ChannelError { + #[cfg_attr(feature = "nightly_coverage", coverage(off))] + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + Some(&self.0) + } +} + +impl calloop::EventSource for PriorityQueueCalloopReceiver { + type Event = Event; + type Metadata = (); + type Ret = (); + type Error = ChannelError; + + fn process_events( + &mut self, + readiness: calloop::Readiness, + token: calloop::Token, + mut callback: F, + ) -> Result + where + F: FnMut(Self::Event, &mut Self::Metadata) -> Self::Ret, + { + let mut clear_readiness = false; + let mut disconnected = false; + + let action = self + .source + .process_events(readiness, token, |(), &mut ()| { + let mut is_empty = true; + + let mut receiver = self.receiver.clone(); + for runnable in receiver.try_iter() { + match runnable { + Ok(r) => { + callback(Event::Msg(r), &mut ()); + is_empty = false; + } + Err(_) => { + disconnected = true; + } + } + } + + if disconnected { + callback(Event::Closed, &mut ()); + } + + if is_empty { + clear_readiness = true; + } + }) 
+ .map_err(ChannelError)?; + + if disconnected { + Ok(PostAction::Remove) + } else if clear_readiness { + Ok(action) + } else { + // Re-notify the ping source so we can try again. + self.ping.ping(); + Ok(PostAction::Continue) + } + } + + fn register( + &mut self, + poll: &mut calloop::Poll, + token_factory: &mut calloop::TokenFactory, + ) -> calloop::Result<()> { + self.source.register(poll, token_factory) + } + + fn reregister( + &mut self, + poll: &mut calloop::Poll, + token_factory: &mut calloop::TokenFactory, + ) -> calloop::Result<()> { + self.source.reregister(poll, token_factory) + } + + fn unregister(&mut self, poll: &mut calloop::Poll) -> calloop::Result<()> { + self.source.unregister(poll) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn calloop_works() { + let mut event_loop = calloop::EventLoop::try_new().unwrap(); + let handle = event_loop.handle(); + + let (tx, rx) = PriorityQueueCalloopReceiver::new(); + + struct Data { + got_msg: bool, + got_closed: bool, + } + + let mut data = Data { + got_msg: false, + got_closed: false, + }; + + let _channel_token = handle + .insert_source(rx, move |evt, &mut (), data: &mut Data| match evt { + Event::Msg(()) => { + data.got_msg = true; + } + + Event::Closed => { + data.got_closed = true; + } + }) + .unwrap(); + + // nothing is sent, nothing is received + event_loop + .dispatch(Some(::std::time::Duration::ZERO), &mut data) + .unwrap(); + + assert!(!data.got_msg); + assert!(!data.got_closed); + // a message is sent + + tx.send(Priority::Medium, ()).unwrap(); + event_loop + .dispatch(Some(::std::time::Duration::ZERO), &mut data) + .unwrap(); + + assert!(data.got_msg); + assert!(!data.got_closed); + + // the sender is dropped + drop(tx); + event_loop + .dispatch(Some(::std::time::Duration::ZERO), &mut data) + .unwrap(); + + assert!(data.got_msg); + assert!(data.got_closed); + } +}
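// Illustrative sketch (not part of the patch): the `calloop_works` test above exercises the
// calloop-integrated receiver; the plain queue can also be used directly, which is what the
// dispatcher methods forward to when they pass a `Priority` per task. This assumes the
// `PriorityQueueReceiver::new()` / `send(priority, item)` / `try_iter()` signatures used
// elsewhere in this file, and leaves the actual ordering between priority levels to the queue.
fn priority_queue_round_trip() {
    let (tx, mut rx) = PriorityQueueReceiver::new();
    // Ignore send errors for brevity in this sketch.
    tx.send(Priority::Low, "background work").ok();
    tx.send(Priority::High, "urgent work").ok();
    // Drain whatever is currently queued; `Err` indicates all senders were dropped.
    for item in rx.try_iter() {
        match item {
            Ok(msg) => println!("got: {msg}"),
            Err(_) => break,
        }
    }
}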
diff --git a/crates/gpui/src/platform/linux/headless/client.rs b/crates/gpui/src/platform/linux/headless/client.rs index da54db371033bac53e2ac3324306fa86eb57fb57..33f1bb17e3230d0b9c9b2c53bcd0603a9cc7f22c 100644 --- a/crates/gpui/src/platform/linux/headless/client.rs +++ b/crates/gpui/src/platform/linux/headless/client.rs @@ -31,7 +31,10 @@ impl HeadlessClient { handle .insert_source(main_receiver, |event, _, _: &mut HeadlessClient| { if let calloop::channel::Event::Msg(runnable) = event { - runnable.run(); + match runnable { + crate::RunnableVariant::Meta(runnable) => runnable.run(), + crate::RunnableVariant::Compat(runnable) => runnable.run(), + }; } }) .ok(); diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 322f5d76110ee36e3cfdf26449bbec85c3d51af5..06a81ec342e9d528a081456583f3ba0f3fb77b6f 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -1,7 +1,6 @@ use std::{ env, path::{Path, PathBuf}, - process::Command, rc::Rc, sync::Arc, }; @@ -15,10 +14,10 @@ use std::{ }; use anyhow::{Context as _, anyhow}; -use async_task::Runnable; -use calloop::{LoopSignal, channel::Channel}; +use calloop::LoopSignal; use futures::channel::oneshot; use util::ResultExt as _; +use util::command::{new_smol_command, new_std_command}; #[cfg(any(feature = "wayland", feature = "x11"))] use xkbcommon::xkb::{self, Keycode, Keysym, State}; @@ -26,7 +25,8 @@ use crate::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId, ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, - PlatformTextSystem, PlatformWindow, Point, Result, Task, WindowAppearance, WindowParams, px, + PlatformTextSystem, PlatformWindow, Point, PriorityQueueCalloopReceiver, Result, + RunnableVariant, Task, WindowAppearance, WindowParams, px, }; #[cfg(any(feature = "wayland", feature = "x11"))] @@ -43,6 +43,50 @@ pub(crate) const KEYRING_LABEL: &str = "zed-github-account"; const FILE_PICKER_PORTAL_MISSING: &str = "Couldn't open file picker due to missing xdg-desktop-portal implementation."; +#[cfg(any(feature = "x11", feature = "wayland"))] +pub trait ResultExt { + type Ok; + + fn notify_err(self, msg: &'static str) -> Self::Ok; +} + +#[cfg(any(feature = "x11", feature = "wayland"))] +impl ResultExt for anyhow::Result { + type Ok = T; + + fn notify_err(self, msg: &'static str) -> T { + match self { + Ok(v) => v, + Err(e) => { + use ashpd::desktop::notification::{Notification, NotificationProxy, Priority}; + use futures::executor::block_on; + + let proxy = block_on(NotificationProxy::new()).expect(msg); + + let notification_id = "dev.zed.Oops"; + block_on( + proxy.add_notification( + notification_id, + Notification::new("Zed failed to launch") + .body(Some( + format!( + "{e:?}. See https://zed.dev/docs/linux for troubleshooting steps."
+ ) + .as_str(), + )) + .priority(Priority::High) + .icon(ashpd::desktop::Icon::with_names(&[ + "dialog-question-symbolic", + ])), + ) + ).expect(msg); + + panic!("{msg}"); + } + } + } +} + pub trait LinuxClient { fn compositor_name(&self) -> &'static str; fn with_common(&self, f: impl FnOnce(&mut LinuxCommon) -> R) -> R; @@ -105,8 +149,8 @@ pub(crate) struct LinuxCommon { } impl LinuxCommon { - pub fn new(signal: LoopSignal) -> (Self, Channel) { - let (main_sender, main_receiver) = calloop::channel::channel::(); + pub fn new(signal: LoopSignal) -> (Self, PriorityQueueCalloopReceiver) { + let (main_sender, main_receiver) = PriorityQueueCalloopReceiver::new(); #[cfg(any(feature = "wayland", feature = "x11"))] let text_system = Arc::new(crate::CosmicTextSystem::new()); @@ -215,7 +259,7 @@ impl Platform for P { clippy::disallowed_methods, reason = "We are restarting ourselves, using std command thus is fine" )] - let restart_process = Command::new("/usr/bin/env") + let restart_process = new_std_command("/usr/bin/env") .arg("bash") .arg("-c") .arg(script) @@ -422,7 +466,7 @@ impl Platform for P { let path = path.to_owned(); self.background_executor() .spawn(async move { - let _ = smol::process::Command::new("xdg-open") + let _ = new_smol_command("xdg-open") .arg(path) .spawn() .context("invoking xdg-open") @@ -605,8 +649,9 @@ pub(super) fn open_uri_internal( .activation_token(activation_token.clone().map(ashpd::ActivationToken::from)) .send_uri(&uri) .await + .and_then(|e| e.response()) { - Ok(_) => return, + Ok(()) => return, Err(e) => log::error!("Failed to open with dbus: {}", e), } diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index fd4d9fb2b31bfa04fe1ecc7d192db11f997d8d59..0e7bf8fbf8880baf5876027e6e764d7411932577 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -17,7 +17,7 @@ use collections::HashMap; use filedescriptor::Pipe; use http_client::Url; use smallvec::SmallVec; -use util::ResultExt; +use util::ResultExt as _; use wayland_backend::client::ObjectId; use wayland_backend::protocol::WEnum; use wayland_client::event_created_child; @@ -71,14 +71,17 @@ use super::{ window::{ImeInput, WaylandWindowStatePtr}, }; -use crate::platform::{PlatformWindow, blade::BladeContext}; use crate::{ AnyWindowHandle, Bounds, Capslock, CursorStyle, DOUBLE_CLICK_INTERVAL, DevicePixels, DisplayId, FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon, LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay, - PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScrollDelta, ScrollWheelEvent, - Size, TouchPhase, WindowParams, point, px, size, + PlatformInput, PlatformKeyboardLayout, Point, ResultExt as _, SCROLL_LINES, ScrollDelta, + ScrollWheelEvent, Size, TouchPhase, WindowParams, point, profiler, px, size, +}; +use crate::{ + RunnableVariant, TaskTiming, + platform::{PlatformWindow, blade::BladeContext}, }; use crate::{ SharedString, @@ -491,14 +494,45 @@ impl WaylandClient { move |event, _, _: &mut WaylandClientStatePtr| { if let calloop::channel::Event::Msg(runnable) = event { handle.insert_idle(|_| { - runnable.run(); + let start = Instant::now(); + let mut timing = match runnable { + RunnableVariant::Meta(runnable) => { + let location = runnable.metadata().location; + let timing = TaskTiming { + location, + 
start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + RunnableVariant::Compat(runnable) => { + let location = core::panic::Location::caller(); + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + }; + + let end = Instant::now(); + timing.end = Some(end); + profiler::add_task_timing(timing); }); } } }) .unwrap(); - let gpu_context = BladeContext::new().expect("Unable to init GPU context"); + // This could be unified with the notification handling in zed/main:fail_to_open_window. + let gpu_context = BladeContext::new().notify_err("Unable to init GPU context"); let seat = seat.unwrap(); let globals = Globals::new( @@ -1386,6 +1420,7 @@ impl Dispatch for WaylandClientStatePtr { state.repeat.current_keycode = Some(keycode); let rate = state.repeat.characters_per_second; + let repeat_interval = Duration::from_secs(1) / rate.max(1); let id = state.repeat.current_id; state .loop_handle @@ -1395,7 +1430,7 @@ impl Dispatch for WaylandClientStatePtr { is_held: true, prefer_character_input: false, }); - move |_event, _metadata, this| { + move |event_timestamp, _metadata, this| { let mut client = this.get_client(); let mut state = client.borrow_mut(); let is_repeating = id == state.repeat.current_id @@ -1412,7 +1447,8 @@ impl Dispatch for WaylandClientStatePtr { drop(state); focused_window.handle_input(input.clone()); - TimeoutAction::ToDuration(Duration::from_secs(1) / rate) + // If the new scheduled time is in the past the event will repeat as soon as possible + TimeoutAction::ToInstant(event_timestamp + repeat_interval) } }) .unwrap(); diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index c02d1f3bc3d0d1ecf7589ae959f8c9b0e3f0fde5..8cc47c3c139708c3cc278c6146411a4383cc0004 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -1025,13 +1025,26 @@ impl PlatformWindow for WaylandWindow { fn resize(&mut self, size: Size) { let state = self.borrow(); let state_ptr = self.0.clone(); - let dp_size = size.to_device_pixels(self.scale_factor()); + + // Keep window geometry consistent with configure handling. On Wayland, window geometry is + // surface-local: resizing should not attempt to translate the window; the compositor + // controls placement. We also account for client-side decoration insets and tiling. 
+ let window_geometry = inset_by_tiling( + Bounds { + origin: Point::default(), + size, + }, + state.inset(), + state.tiling, + ) + .map(|v| v.0 as i32) + .map_size(|v| if v <= 0 { 1 } else { v }); state.surface_state.set_geometry( - state.bounds.origin.x.0 as i32, - state.bounds.origin.y.0 as i32, - dp_size.width.0, - dp_size.height.0, + window_geometry.origin.x, + window_geometry.origin.y, + window_geometry.size.width, + window_geometry.size.height, ); state @@ -1270,10 +1283,21 @@ impl PlatformWindow for WaylandWindow { fn request_decorations(&self, decorations: WindowDecorations) { let mut state = self.borrow_mut(); - state.decorations = decorations; - if let Some(decoration) = state.surface_state.decoration() { - decoration.set_mode(decorations.to_xdg()); - update_window(state); + match state.surface_state.decoration().as_ref() { + Some(decoration) => { + decoration.set_mode(decorations.to_xdg()); + state.decorations = decorations; + update_window(state); + } + None => { + if matches!(decorations, WindowDecorations::Server) { + log::info!( + "Server-side decorations requested, but the Wayland server does not support them. Falling back to client-side decorations." + ); + } + state.decorations = WindowDecorations::Client; + update_window(state); + } } } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 5b0be84b2fc08d220800271a402496e5ba487b15..5e9089b09809a7ec1b8b257427b0a670adc0f123 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,4 +1,4 @@ -use crate::{Capslock, xcb_flush}; +use crate::{Capslock, ResultExt as _, RunnableVariant, TaskTiming, profiler, xcb_flush}; use anyhow::{Context as _, anyhow}; use ashpd::WindowIdentifier; use calloop::{ @@ -18,7 +18,7 @@ use std::{ rc::{Rc, Weak}, time::{Duration, Instant}, }; -use util::ResultExt; +use util::ResultExt as _; use x11rb::{ connection::{Connection, RequestConnection}, @@ -29,7 +29,7 @@ use x11rb::{ protocol::xkb::ConnectionExt as _, protocol::xproto::{ AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, - ConnectionExt as _, EventMask, Visibility, + ConnectionExt as _, EventMask, ModMask, Visibility, }, protocol::{Event, randr, render, xinput, xkb, xproto}, resource_manager::Database, @@ -313,7 +313,37 @@ impl X11Client { // events have higher priority and runnables are only worked off after the event // callbacks. 
handle.insert_idle(|_| { - runnable.run(); + let start = Instant::now(); + let mut timing = match runnable { + RunnableVariant::Meta(runnable) => { + let location = runnable.metadata().location; + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + RunnableVariant::Compat(runnable) => { + let location = core::panic::Location::caller(); + let timing = TaskTiming { + location, + start, + end: None, + }; + profiler::add_task_timing(timing); + + runnable.run(); + timing + } + }; + + let end = Instant::now(); + timing.end = Some(end); + profiler::add_task_timing(timing); }); } } @@ -407,7 +437,7 @@ impl X11Client { .to_string(); let keyboard_layout = LinuxKeyboardLayout::new(layout_name.into()); - let gpu_context = BladeContext::new().context("Unable to init GPU context")?; + let gpu_context = BladeContext::new().notify_err("Unable to init GPU context"); let resource_database = x11rb::resource_manager::new_from_default(&xcb_connection) .context("Failed to create resource database")?; @@ -988,6 +1018,12 @@ impl X11Client { let modifiers = modifiers_from_state(event.state); state.modifiers = modifiers; state.pre_key_char_down.take(); + + // Macros containing modifiers might result in + // the modifiers missing from the event. + // We therefore update the mask from the global state. + update_xkb_mask_from_event_state(&mut state.xkb, event.state); + let keystroke = { let code = event.detail.into(); let mut keystroke = crate::Keystroke::from_xkb(&state.xkb, modifiers, code); @@ -1053,6 +1089,11 @@ impl X11Client { let modifiers = modifiers_from_state(event.state); state.modifiers = modifiers; + // Macros containing modifiers might result in + // the modifiers missing from the event. + // We therefore update the mask from the global state. 
+ update_xkb_mask_from_event_state(&mut state.xkb, event.state); + let keystroke = { let code = event.detail.into(); let keystroke = crate::Keystroke::from_xkb(&state.xkb, modifiers, code); @@ -2486,3 +2527,19 @@ fn get_dpi_factor((width_px, height_px): (u32, u32), (width_mm, height_mm): (u64 fn valid_scale_factor(scale_factor: f32) -> bool { scale_factor.is_sign_positive() && scale_factor.is_normal() } + +#[inline] +fn update_xkb_mask_from_event_state(xkb: &mut xkbc::State, event_state: xproto::KeyButMask) { + let depressed_mods = event_state.remove((ModMask::LOCK | ModMask::M2).bits()); + let latched_mods = xkb.serialize_mods(xkbc::STATE_MODS_LATCHED); + let locked_mods = xkb.serialize_mods(xkbc::STATE_MODS_LOCKED); + let locked_layout = xkb.serialize_layout(xkbc::STATE_LAYOUT_LOCKED); + xkb.update_mask( + depressed_mods.into(), + latched_mods, + locked_mods, + 0, + 0, + locked_layout, + ); +} diff --git a/crates/gpui/src/platform/mac.rs b/crates/gpui/src/platform/mac.rs index 76d636b457517da64cf66988325652ddea56c5d3..aa056846e6bc56e53d95c41a44444dbb89a16237 100644 --- a/crates/gpui/src/platform/mac.rs +++ b/crates/gpui/src/platform/mac.rs @@ -135,6 +135,8 @@ unsafe impl objc::Encode for NSRange { } } +/// Allow NSString::alloc use here because it sets autorelease +#[allow(clippy::disallowed_methods)] unsafe fn ns_string(string: &str) -> id { unsafe { NSString::alloc(nil).init_str(string).autorelease() } } diff --git a/crates/gpui/src/platform/mac/attributed_string.rs b/crates/gpui/src/platform/mac/attributed_string.rs index 5f313ac699d6e1a096c4bcf807fd6c080d0064da..42fe1e5bf7a396a4eaa8ade26977a207d43b49b5 100644 --- a/crates/gpui/src/platform/mac/attributed_string.rs +++ b/crates/gpui/src/platform/mac/attributed_string.rs @@ -50,10 +50,12 @@ impl NSMutableAttributedString for id {} #[cfg(test)] mod tests { + use crate::platform::mac::ns_string; + use super::*; use cocoa::appkit::NSImage; use cocoa::base::nil; - use cocoa::foundation::NSString; + use cocoa::foundation::NSAutoreleasePool; #[test] #[ignore] // This was SIGSEGV-ing on CI but not locally; need to investigate https://github.com/zed-industries/zed/actions/runs/10362363230/job/28684225486?pr=15782#step:4:1348 fn test_nsattributed_string() { @@ -68,26 +70,34 @@ mod tests { impl NSTextAttachment for id {} unsafe { - let image: id = msg_send![class!(NSImage), alloc]; - image.initWithContentsOfFile_(NSString::alloc(nil).init_str("test.jpeg")); + let image: id = { + let img: id = msg_send![class!(NSImage), alloc]; + let img: id = msg_send![img, initWithContentsOfFile: ns_string("test.jpeg")]; + let img: id = msg_send![img, autorelease]; + img + }; let _size = image.size(); - let string = NSString::alloc(nil).init_str("Test String"); - let attr_string = NSMutableAttributedString::alloc(nil).init_attributed_string(string); - let hello_string = NSString::alloc(nil).init_str("Hello World"); - let hello_attr_string = - NSAttributedString::alloc(nil).init_attributed_string(hello_string); + let string = ns_string("Test String"); + let attr_string = NSMutableAttributedString::alloc(nil) + .init_attributed_string(string) + .autorelease(); + let hello_string = ns_string("Hello World"); + let hello_attr_string = NSAttributedString::alloc(nil) + .init_attributed_string(hello_string) + .autorelease(); attr_string.appendAttributedString_(hello_attr_string); - let attachment = NSTextAttachment::alloc(nil); + let attachment: id = msg_send![NSTextAttachment::alloc(nil), autorelease]; let _: () = msg_send![attachment, setImage: image]; let 
image_attr_string = msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment]; attr_string.appendAttributedString_(image_attr_string); - let another_string = NSString::alloc(nil).init_str("Another String"); - let another_attr_string = - NSAttributedString::alloc(nil).init_attributed_string(another_string); + let another_string = ns_string("Another String"); + let another_attr_string = NSAttributedString::alloc(nil) + .init_attributed_string(another_string) + .autorelease(); attr_string.appendAttributedString_(another_attr_string); let _len: cocoa::foundation::NSUInteger = msg_send![attr_string, length]; diff --git a/crates/gpui/src/platform/mac/dispatcher.rs b/crates/gpui/src/platform/mac/dispatcher.rs index c72f791f850469287cf66021558032902982ccec..1dfea82d58cbf2387571cabdcd7fbcfcf785c735 100644 --- a/crates/gpui/src/platform/mac/dispatcher.rs +++ b/crates/gpui/src/platform/mac/dispatcher.rs @@ -2,8 +2,23 @@ #![allow(non_camel_case_types)] #![allow(non_snake_case)] -use crate::{PlatformDispatcher, TaskLabel}; +use crate::{ + GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RealtimePriority, RunnableMeta, + RunnableVariant, THREAD_TIMINGS, TaskLabel, TaskTiming, ThreadTaskTimings, +}; + +use anyhow::Context; use async_task::Runnable; +use mach2::{ + kern_return::KERN_SUCCESS, + mach_time::mach_timebase_info_data_t, + thread_policy::{ + THREAD_EXTENDED_POLICY, THREAD_EXTENDED_POLICY_COUNT, THREAD_PRECEDENCE_POLICY, + THREAD_PRECEDENCE_POLICY_COUNT, THREAD_TIME_CONSTRAINT_POLICY, + THREAD_TIME_CONSTRAINT_POLICY_COUNT, thread_extended_policy_data_t, + thread_precedence_policy_data_t, thread_time_constraint_policy_data_t, + }, +}; use objc::{ class, msg_send, runtime::{BOOL, YES}, @@ -11,9 +26,11 @@ use objc::{ }; use std::{ ffi::c_void, + mem::MaybeUninit, ptr::{NonNull, addr_of}, - time::Duration, + time::{Duration, Instant}, }; +use util::ResultExt; /// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent /// these pub items from leaking into public API. 
@@ -29,47 +46,277 @@ pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t { pub(crate) struct MacDispatcher; impl PlatformDispatcher for MacDispatcher { + fn get_all_timings(&self) -> Vec { + let global_timings = GLOBAL_THREAD_TIMINGS.lock(); + ThreadTaskTimings::convert(&global_timings) + } + + fn get_current_thread_timings(&self) -> Vec { + THREAD_TIMINGS.with(|timings| { + let timings = &timings.lock().timings; + + let mut vec = Vec::with_capacity(timings.len()); + + let (s1, s2) = timings.as_slices(); + vec.extend_from_slice(s1); + vec.extend_from_slice(s2); + vec + }) + } + fn is_main_thread(&self) -> bool { let is_main_thread: BOOL = unsafe { msg_send![class!(NSThread), isMainThread] }; is_main_thread == YES } - fn dispatch(&self, runnable: Runnable, _: Option) { + fn dispatch(&self, runnable: RunnableVariant, _: Option, priority: Priority) { + let (context, trampoline) = match runnable { + RunnableVariant::Meta(runnable) => ( + runnable.into_raw().as_ptr() as *mut c_void, + Some(trampoline as unsafe extern "C" fn(*mut c_void)), + ), + RunnableVariant::Compat(runnable) => ( + runnable.into_raw().as_ptr() as *mut c_void, + Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)), + ), + }; + + let queue_priority = match priority { + Priority::Realtime(_) => unreachable!(), + Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize, + Priority::Medium => DISPATCH_QUEUE_PRIORITY_DEFAULT as isize, + Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize, + }; + unsafe { dispatch_async_f( - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0), - runnable.into_raw().as_ptr() as *mut c_void, - Some(trampoline), + dispatch_get_global_queue(queue_priority, 0), + context, + trampoline, ); } } - fn dispatch_on_main_thread(&self, runnable: Runnable) { - unsafe { - dispatch_async_f( - dispatch_get_main_queue(), + fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) { + let (context, trampoline) = match runnable { + RunnableVariant::Meta(runnable) => ( runnable.into_raw().as_ptr() as *mut c_void, - Some(trampoline), - ); + Some(trampoline as unsafe extern "C" fn(*mut c_void)), + ), + RunnableVariant::Compat(runnable) => ( + runnable.into_raw().as_ptr() as *mut c_void, + Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)), + ), + }; + unsafe { + dispatch_async_f(dispatch_get_main_queue(), context, trampoline); } } - fn dispatch_after(&self, duration: Duration, runnable: Runnable) { + fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { + let (context, trampoline) = match runnable { + RunnableVariant::Meta(runnable) => ( + runnable.into_raw().as_ptr() as *mut c_void, + Some(trampoline as unsafe extern "C" fn(*mut c_void)), + ), + RunnableVariant::Compat(runnable) => ( + runnable.into_raw().as_ptr() as *mut c_void, + Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)), + ), + }; unsafe { let queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0); let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64); - dispatch_after_f( - when, - queue, - runnable.into_raw().as_ptr() as *mut c_void, - Some(trampoline), - ); + dispatch_after_f(when, queue, context, trampoline); } } + + fn spawn_realtime(&self, priority: RealtimePriority, f: Box) { + std::thread::spawn(move || { + match priority { + RealtimePriority::Audio => set_audio_thread_priority(), + RealtimePriority::Other => set_high_thread_priority(), + } + .context(format!("for priority {:?}", 
priority)) + .log_err(); + + f(); + }); + } +} + +fn set_high_thread_priority() -> anyhow::Result<()> { + // SAFETY: always safe to call + let thread_id = unsafe { libc::pthread_self() }; + + // SAFETY: all sched_param members are valid when initialized to zero. + let mut sched_param = unsafe { MaybeUninit::::zeroed().assume_init() }; + sched_param.sched_priority = 45; + + let result = unsafe { libc::pthread_setschedparam(thread_id, libc::SCHED_FIFO, &sched_param) }; + if result != 0 { + anyhow::bail!("failed to set realtime thread priority") + } + + Ok(()) +} + +fn set_audio_thread_priority() -> anyhow::Result<()> { + // https://chromium.googlesource.com/chromium/chromium/+/master/base/threading/platform_thread_mac.mm#93 + + // SAFETY: always safe to call + let thread_id = unsafe { libc::pthread_self() }; + + // SAFETY: thread_id is a valid thread id + let thread_id = unsafe { libc::pthread_mach_thread_np(thread_id) }; + + // Fixed priority thread + let mut policy = thread_extended_policy_data_t { timeshare: 0 }; + + // SAFETY: thread_id is a valid thread id + // SAFETY: thread_extended_policy_data_t is passed as THREAD_EXTENDED_POLICY + let result = unsafe { + mach2::thread_policy::thread_policy_set( + thread_id, + THREAD_EXTENDED_POLICY, + &mut policy as *mut _ as *mut _, + THREAD_EXTENDED_POLICY_COUNT, + ) + }; + + if result != KERN_SUCCESS { + anyhow::bail!("failed to set thread extended policy"); + } + + // relatively high priority + let mut precedence = thread_precedence_policy_data_t { importance: 63 }; + + // SAFETY: thread_id is a valid thread id + // SAFETY: thread_precedence_policy_data_t is passed as THREAD_PRECEDENCE_POLICY + let result = unsafe { + mach2::thread_policy::thread_policy_set( + thread_id, + THREAD_PRECEDENCE_POLICY, + &mut precedence as *mut _ as *mut _, + THREAD_PRECEDENCE_POLICY_COUNT, + ) + }; + + if result != KERN_SUCCESS { + anyhow::bail!("failed to set thread precedence policy"); + } + + const GUARANTEED_AUDIO_DUTY_CYCLE: f32 = 0.75; + const MAX_AUDIO_DUTY_CYCLE: f32 = 0.85; + + // ~128 frames @ 44.1KHz + const TIME_QUANTUM: f32 = 2.9; + + const AUDIO_TIME_NEEDED: f32 = GUARANTEED_AUDIO_DUTY_CYCLE * TIME_QUANTUM; + const MAX_TIME_ALLOWED: f32 = MAX_AUDIO_DUTY_CYCLE * TIME_QUANTUM; + + let mut timebase_info = mach_timebase_info_data_t { numer: 0, denom: 0 }; + // SAFETY: timebase_info is a valid pointer to a mach_timebase_info_data_t struct + unsafe { mach2::mach_time::mach_timebase_info(&mut timebase_info) }; + + let ms_to_abs_time = ((timebase_info.denom as f32) / (timebase_info.numer as f32)) * 1000000f32; + + let mut time_constraints = thread_time_constraint_policy_data_t { + period: (TIME_QUANTUM * ms_to_abs_time) as u32, + computation: (AUDIO_TIME_NEEDED * ms_to_abs_time) as u32, + constraint: (MAX_TIME_ALLOWED * ms_to_abs_time) as u32, + preemptible: 0, + }; + + // SAFETY: thread_id is a valid thread id + // SAFETY: thread_precedence_pthread_time_constraint_policy_data_t is passed as THREAD_TIME_CONSTRAINT_POLICY + let result = unsafe { + mach2::thread_policy::thread_policy_set( + thread_id, + THREAD_TIME_CONSTRAINT_POLICY, + &mut time_constraints as *mut _ as *mut _, + THREAD_TIME_CONSTRAINT_POLICY_COUNT, + ) + }; + + if result != KERN_SUCCESS { + anyhow::bail!("failed to set thread time constraint policy"); + } + + Ok(()) } extern "C" fn trampoline(runnable: *mut c_void) { + let task = + unsafe { Runnable::::from_raw(NonNull::new_unchecked(runnable as *mut ())) }; + + let location = task.metadata().location; + + let start = Instant::now(); + let 
timing = TaskTiming { + location, + start, + end: None, + }; + + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + if let Some(last_timing) = timings.iter_mut().rev().next() { + if last_timing.location == timing.location { + return; + } + } + + timings.push_back(timing); + }); + + task.run(); + let end = Instant::now(); + + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + let Some(last_timing) = timings.iter_mut().rev().next() else { + return; + }; + last_timing.end = Some(end); + }); +} + +extern "C" fn trampoline_compat(runnable: *mut c_void) { let task = unsafe { Runnable::<()>::from_raw(NonNull::new_unchecked(runnable as *mut ())) }; + + let location = core::panic::Location::caller(); + + let start = Instant::now(); + let timing = TaskTiming { + location, + start, + end: None, + }; + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + if let Some(last_timing) = timings.iter_mut().rev().next() { + if last_timing.location == timing.location { + return; + } + } + + timings.push_back(timing); + }); + task.run(); + let end = Instant::now(); + + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + let Some(last_timing) = timings.iter_mut().rev().next() else { + return; + }; + last_timing.end = Some(end); + }); } diff --git a/crates/gpui/src/platform/mac/display.rs b/crates/gpui/src/platform/mac/display.rs index 4ee27027d5fbff973b9ef2c27b5d55739c8a711a..94791620e8a394f67a38c257c95c575398cee0b7 100644 --- a/crates/gpui/src/platform/mac/display.rs +++ b/crates/gpui/src/platform/mac/display.rs @@ -1,9 +1,10 @@ -use crate::{Bounds, DisplayId, Pixels, PlatformDisplay, px, size}; +use super::ns_string; +use crate::{Bounds, DisplayId, Pixels, PlatformDisplay, point, px, size}; use anyhow::Result; use cocoa::{ appkit::NSScreen, base::{id, nil}, - foundation::{NSDictionary, NSString}, + foundation::{NSArray, NSDictionary}, }; use core_foundation::uuid::{CFUUIDGetUUIDBytes, CFUUIDRef}; use core_graphics::display::{CGDirectDisplayID, CGDisplayBounds, CGGetActiveDisplayList}; @@ -35,7 +36,7 @@ impl MacDisplay { let screens = NSScreen::screens(nil); let screen = cocoa::foundation::NSArray::objectAtIndex(screens, 0); let device_description = NSScreen::deviceDescription(screen); - let screen_number_key: id = NSString::alloc(nil).init_str("NSScreenNumber"); + let screen_number_key: id = ns_string("NSScreenNumber"); let screen_number = device_description.objectForKey_(screen_number_key); let screen_number: CGDirectDisplayID = msg_send![screen_number, unsignedIntegerValue]; Self(screen_number) @@ -114,4 +115,53 @@ impl PlatformDisplay for MacDisplay { } } } + + fn visible_bounds(&self) -> Bounds { + unsafe { + let dominated_screen = self.get_nsscreen(); + + if dominated_screen == nil { + return self.bounds(); + } + + let screen_frame = NSScreen::frame(dominated_screen); + let visible_frame = NSScreen::visibleFrame(dominated_screen); + + // Convert from bottom-left origin (AppKit) to top-left origin + let origin_y = + screen_frame.size.height - visible_frame.origin.y - visible_frame.size.height + + screen_frame.origin.y; + + Bounds { + origin: point( + px(visible_frame.origin.x as f32 - screen_frame.origin.x as f32), + px(origin_y as f32), + ), + size: size( + px(visible_frame.size.width as f32), + px(visible_frame.size.height as f32), + ), + } + } + } +} + +impl MacDisplay { + /// Find 
the NSScreen corresponding to this display + unsafe fn get_nsscreen(&self) -> id { + let screens = unsafe { NSScreen::screens(nil) }; + let count = unsafe { NSArray::count(screens) }; + let screen_number_key: id = unsafe { ns_string("NSScreenNumber") }; + + for i in 0..count { + let screen = unsafe { NSArray::objectAtIndex(screens, i) }; + let device_description = unsafe { NSScreen::deviceDescription(screen) }; + let screen_number = unsafe { device_description.objectForKey_(screen_number_key) }; + let screen_id: CGDirectDisplayID = msg_send![screen_number, unsignedIntegerValue]; + if screen_id == self.0 { + return screen; + } + } + nil + } } diff --git a/crates/gpui/src/platform/mac/events.rs b/crates/gpui/src/platform/mac/events.rs index acc392a5f3429f20931455ea06733376ea0f587a..7a12e8d3d7ccb2e8a2f7b32b81c24a29f650e6e2 100644 --- a/crates/gpui/src/platform/mac/events.rs +++ b/crates/gpui/src/platform/mac/events.rs @@ -1,7 +1,8 @@ use crate::{ Capslock, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, - MouseDownEvent, MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, - PlatformInput, ScrollDelta, ScrollWheelEvent, TouchPhase, + MouseDownEvent, MouseExitEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, + NavigationDirection, Pixels, PlatformInput, PressureStage, ScrollDelta, ScrollWheelEvent, + TouchPhase, platform::mac::{ LMGetKbdType, NSStringExt, TISCopyCurrentKeyboardLayoutInputSource, TISGetInputSourceProperty, UCKeyTranslate, kTISPropertyUnicodeKeyLayoutData, @@ -187,6 +188,26 @@ impl PlatformInput { }) }) } + NSEventType::NSEventTypePressure => { + let stage = native_event.stage(); + let pressure = native_event.pressure(); + + window_height.map(|window_height| { + Self::MousePressure(MousePressureEvent { + stage: match stage { + 1 => PressureStage::Normal, + 2 => PressureStage::Force, + _ => PressureStage::Zero, + }, + pressure, + modifiers: read_modifiers(native_event), + position: point( + px(native_event.locationInWindow().x as f32), + window_height - px(native_event.locationInWindow().y as f32), + ), + }) + }) + } // Some mice (like Logitech MX Master) send navigation buttons as swipe events NSEventType::NSEventTypeSwipe => { let navigation_direction = match native_event.phase() { diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs index 9e5d6ec5ff02c74b4f0acfada8eee3d002bfd06b..550041a0ccb4cd39bc7a86317d9540e806af2a28 100644 --- a/crates/gpui/src/platform/mac/metal_renderer.rs +++ b/crates/gpui/src/platform/mac/metal_renderer.rs @@ -132,11 +132,21 @@ impl MetalRenderer { // Prefer low‐power integrated GPUs on Intel Mac. On Apple // Silicon, there is only ever one GPU, so this is equivalent to // `metal::Device::system_default()`. - let mut devices = metal::Device::all(); - devices.sort_by_key(|device| (device.is_removable(), device.is_low_power())); - let Some(device) = devices.pop() else { - log::error!("unable to access a compatible graphics device"); - std::process::exit(1); + let device = if let Some(d) = metal::Device::all() + .into_iter() + .min_by_key(|d| (d.is_removable(), !d.is_low_power())) + { + d + } else { + // For some reason `all()` can return an empty list, see https://github.com/zed-industries/zed/issues/37689 + // In that case, we fall back to the system default device. 
+ log::error!( + "Unable to enumerate Metal devices; attempting to use system default device" + ); + metal::Device::system_default().unwrap_or_else(|| { + log::error!("unable to access a compatible graphics device"); + std::process::exit(1); + }) }; let layer = metal::MetalLayer::new(); diff --git a/crates/gpui/src/platform/mac/open_type.rs b/crates/gpui/src/platform/mac/open_type.rs index 37a29559fdfbc284ffd1021cc6c2c6ed717ca228..ff501df15f671318548a3959bd6b966f97e051b1 100644 --- a/crates/gpui/src/platform/mac/open_type.rs +++ b/crates/gpui/src/platform/mac/open_type.rs @@ -52,6 +52,11 @@ pub fn apply_features_and_fallbacks( &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks, ); + + for value in &values { + CFRelease(*value as _); + } + let new_descriptor = CTFontDescriptorCreateWithAttributes(attrs); CFRelease(attrs as _); let new_descriptor = CTFontDescriptor::wrap_under_create_rule(new_descriptor); diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 101520cdbbc220a49b1fa56584729ad93d507fe7..ee67f465e34bd8109246f68b311e225aa8f9fd0a 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -2,15 +2,14 @@ use super::{ BoolExt, MacKeyboardLayout, MacKeyboardMapper, attributed_string::{NSAttributedString, NSMutableAttributedString}, events::key_to_native, - renderer, + ns_string, renderer, }; use crate::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, KeyContext, Keymap, MacDispatcher, MacDisplay, MacWindow, Menu, MenuItem, OsMenu, OwnedMenu, PathPromptOptions, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, - PlatformWindow, Result, SemanticVersion, SystemMenuType, Task, WindowAppearance, WindowParams, - hash, + PlatformWindow, Result, SystemMenuType, Task, WindowAppearance, WindowParams, hash, }; use anyhow::{Context as _, anyhow}; use block::ConcreteBlock; @@ -47,20 +46,23 @@ use objc::{ }; use parking_lot::Mutex; use ptr::null_mut; +use semver::Version; use std::{ cell::Cell, convert::TryInto, ffi::{CStr, OsStr, c_void}, os::{raw::c_char, unix::ffi::OsStrExt}, path::{Path, PathBuf}, - process::Command, ptr, rc::Rc, slice, str, sync::{Arc, OnceLock}, }; use strum::IntoEnumIterator; -use util::ResultExt; +use util::{ + ResultExt, + command::{new_smol_command, new_std_command}, +}; #[allow(non_upper_case_globals)] const NSUTF8StringEncoding: NSUInteger = 4; @@ -387,7 +389,7 @@ impl MacPlatform { ns_string(key_to_native(keystroke.key()).as_ref()), ) .autorelease(); - if Self::os_version() >= SemanticVersion::new(12, 0, 0) { + if Self::os_version() >= Version::new(12, 0, 0) { let _: () = msg_send![item, setAllowsAutomaticKeyEquivalentLocalization: NO]; } item.setKeyEquivalentModifierMask_(mask); @@ -450,15 +452,15 @@ impl MacPlatform { } } - fn os_version() -> SemanticVersion { + fn os_version() -> Version { let version = unsafe { let process_info = NSProcessInfo::processInfo(nil); process_info.operatingSystemVersion() }; - SemanticVersion::new( - version.majorVersion as usize, - version.minorVersion as usize, - version.patchVersion as usize, + Version::new( + version.majorVersion, + version.minorVersion, + version.patchVersion, ) } } @@ -552,7 +554,7 @@ impl Platform for MacPlatform { clippy::disallowed_methods, reason = "We are restarting ourselves, using std command thus is fine" )] - let restart_process = Command::new("/bin/bash") + 
let restart_process = new_std_command("/bin/bash") .arg("-c") .arg(script) .arg(app_pid) @@ -651,9 +653,12 @@ impl Platform for MacPlatform { fn open_url(&self, url: &str) { unsafe { - let url = NSURL::alloc(nil) - .initWithString_(ns_string(url)) - .autorelease(); + let ns_url = NSURL::alloc(nil).initWithString_(ns_string(url)); + if ns_url.is_null() { + log::error!("Failed to create NSURL from string: {}", url); + return; + } + let url = ns_url.autorelease(); let workspace: id = msg_send![class!(NSWorkspace), sharedWorkspace]; msg_send![workspace, openURL: url] } @@ -663,7 +668,7 @@ impl Platform for MacPlatform { // API only available post Monterey // https://developer.apple.com/documentation/appkit/nsworkspace/3753004-setdefaultapplicationaturl let (done_tx, done_rx) = oneshot::channel(); - if Self::os_version() < SemanticVersion::new(12, 0, 0) { + if Self::os_version() < Version::new(12, 0, 0) { return Task::ready(Err(anyhow!( "macOS 12.0 or later is required to register URL schemes" ))); @@ -807,7 +812,7 @@ impl Platform for MacPlatform { // to break that use-case than breaking `a.sql`. if chunks.len() == 3 && chunks[1].starts_with(chunks[2]) - && Self::os_version() >= SemanticVersion::new(15, 0, 0) + && Self::os_version() >= Version::new(15, 0, 0) { let new_filename = OsStr::from_bytes( &filename.as_bytes() @@ -864,7 +869,7 @@ impl Platform for MacPlatform { .lock() .background_executor .spawn(async move { - if let Some(mut child) = smol::process::Command::new("open") + if let Some(mut child) = new_smol_command("open") .arg(path) .spawn() .context("invoking open command") @@ -1043,6 +1048,7 @@ impl Platform for MacPlatform { ClipboardEntry::Image(image) => { self.write_image_to_clipboard(image); } + ClipboardEntry::ExternalPaths(_) => {} }, None => { // Writing an empty list of entries just clears the clipboard. @@ -1055,13 +1061,15 @@ impl Platform for MacPlatform { let attributed_string = { let mut buf = NSMutableAttributedString::alloc(nil) // TODO can we skip this? Or at least part of it? - .init_attributed_string(NSString::alloc(nil).init_str("")); + .init_attributed_string(ns_string("")) + .autorelease(); for entry in item.entries { if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry { let to_append = NSAttributedString::alloc(nil) - .init_attributed_string(NSString::alloc(nil).init_str(&text)); + .init_attributed_string(ns_string(&text)) + .autorelease(); buf.appendAttributedString_(to_append); } @@ -1129,32 +1137,7 @@ impl Platform for MacPlatform { } } - // Next, check for URL flavors (including file URLs). Some tools only provide a URL - // with no plain text entry. - { - // Try the modern UTType identifiers first. - let file_url_type: id = ns_string("public.file-url"); - let url_type: id = ns_string("public.url"); - - let url_data = if msg_send![types, containsObject: file_url_type] { - pasteboard.dataForType(file_url_type) - } else if msg_send![types, containsObject: url_type] { - pasteboard.dataForType(url_type) - } else { - nil - }; - - if url_data != nil && !url_data.bytes().is_null() { - let bytes = slice::from_raw_parts( - url_data.bytes() as *mut u8, - url_data.length() as usize, - ); - - return Some(self.read_string_from_clipboard(&state, bytes)); - } - } - - // If it wasn't a string or URL, try the various supported image types. + // If it wasn't a string, try the various supported image types. 
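
Several of the changes above gate macOS-only behavior on `Self::os_version() >= Version::new(12, 0, 0)` (and `15, 0, 0`). A std-only sketch of that gating, using a `(major, minor, patch)` tuple as a stand-in for the semver `Version` type; for plain x.y.z releases the lexicographic tuple comparison gives the same ordering:

```rust
fn os_version() -> (u64, u64, u64) {
    // Hypothetical stand-in for querying the OS version
    // (NSProcessInfo.operatingSystemVersion on macOS).
    (13, 5, 2)
}

fn can_register_url_schemes() -> bool {
    // Mirrors the ">= 12.0.0" gate: NSWorkspace's
    // setDefaultApplicationAtURL API is only available on Monterey or later.
    os_version() >= (12, 0, 0)
}

fn main() {
    println!("URL scheme registration available: {}", can_register_url_schemes());
}
```
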
for format in ImageFormat::iter() { if let Some(item) = try_clipboard_image(pasteboard, format) { return Some(item); @@ -1162,7 +1145,7 @@ impl Platform for MacPlatform { } } - // If it wasn't a string, URL, or a supported image type, give up. + // If it wasn't a string or a supported image type, give up. None } @@ -1562,10 +1545,6 @@ extern "C" fn handle_dock_menu(this: &mut Object, _: Sel, _: id) -> id { } } -unsafe fn ns_string(string: &str) -> id { - unsafe { NSString::alloc(nil).init_str(string).autorelease() } -} - unsafe fn ns_url_to_path(url: id) -> Result { let path: *mut c_char = msg_send![url, fileSystemRepresentation]; anyhow::ensure!(!path.is_null(), "url is not a file path: {}", unsafe { @@ -1737,40 +1716,6 @@ mod tests { ); } - #[test] - fn test_file_url_reads_as_url_string() { - let platform = build_platform(); - - // Create a file URL for an arbitrary test path and write it to the pasteboard. - // This path does not need to exist; we only validate URL→path conversion. - let mock_path = "/tmp/zed-clipboard-file-url-test"; - unsafe { - // Build an NSURL from the file path - let url: id = msg_send![class!(NSURL), fileURLWithPath: ns_string(mock_path)]; - let abs: id = msg_send![url, absoluteString]; - - // Encode the URL string as UTF-8 bytes - let len: usize = msg_send![abs, lengthOfBytesUsingEncoding: NSUTF8StringEncoding]; - let bytes_ptr = abs.UTF8String() as *const u8; - let data = NSData::dataWithBytes_length_(nil, bytes_ptr as *const c_void, len as u64); - - // Write as public.file-url to the unique pasteboard - let file_url_type: id = ns_string("public.file-url"); - platform - .0 - .lock() - .pasteboard - .setData_forType(data, file_url_type); - } - - // Ensure the clipboard read returns the URL string, not a converted path - let expected_url = format!("file://{}", mock_path); - assert_eq!( - platform.read_from_clipboard(), - Some(ClipboardItem::new_string(expected_url)) - ); - } - fn build_platform() -> MacPlatform { let platform = MacPlatform::new(false); platform.0.lock().pasteboard = unsafe { NSPasteboard::pasteboardWithUniqueName(nil) }; diff --git a/crates/gpui/src/platform/mac/screen_capture.rs b/crates/gpui/src/platform/mac/screen_capture.rs index 4d4ffa6896520e465dfeb7b1ccc06e1149f9e25d..4b80a87d32f45540c76790065514f29cc7f93b3f 100644 --- a/crates/gpui/src/platform/mac/screen_capture.rs +++ b/crates/gpui/src/platform/mac/screen_capture.rs @@ -1,3 +1,4 @@ +use super::ns_string; use crate::{ DevicePixels, ForegroundExecutor, SharedString, SourceMetadata, platform::{ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream}, @@ -7,7 +8,7 @@ use anyhow::{Result, anyhow}; use block::ConcreteBlock; use cocoa::{ base::{YES, id, nil}, - foundation::{NSArray, NSString}, + foundation::NSArray, }; use collections::HashMap; use core_foundation::base::TCFType; @@ -109,13 +110,21 @@ impl ScreenCaptureSource for MacScreenCaptureSource { let _: id = msg_send![configuration, setHeight: meta.resolution.height.0 as i64]; let stream: id = msg_send![stream, initWithFilter:filter configuration:configuration delegate:delegate]; + // Stream contains filter, configuration, and delegate internally so we release them here + // to prevent a memory leak when steam is dropped + let _: () = msg_send![filter, release]; + let _: () = msg_send![configuration, release]; + let _: () = msg_send![delegate, release]; + let (mut tx, rx) = oneshot::channel(); let mut error: id = nil; let _: () = msg_send![stream, addStreamOutput:output type:SCStreamOutputTypeScreen sampleHandlerQueue:0 error:&mut 
error as *mut id]; if error != nil { let message: id = msg_send![error, localizedDescription]; - tx.send(Err(anyhow!("failed to add stream output {message:?}"))) + let _: () = msg_send![stream, release]; + let _: () = msg_send![output, release]; + tx.send(Err(anyhow!("failed to add stream output {message:?}"))) .ok(); return rx; } @@ -131,8 +140,10 @@ impl ScreenCaptureSource for MacScreenCaptureSource { }; Ok(Box::new(stream) as Box) } else { + let _: () = msg_send![stream, release]; + let _: () = msg_send![output, release]; let message: id = msg_send![error, localizedDescription]; - Err(anyhow!("failed to stop screen capture stream {message:?}")) + Err(anyhow!("failed to start screen capture stream {message:?}")) }; if let Some(tx) = tx.borrow_mut().take() { tx.send(result).ok(); @@ -195,7 +206,7 @@ unsafe fn screen_id_to_human_label() -> HashMap { let screens: id = msg_send![class!(NSScreen), screens]; let count: usize = msg_send![screens, count]; let mut map = HashMap::default(); - let screen_number_key = unsafe { NSString::alloc(nil).init_str("NSScreenNumber") }; + let screen_number_key = unsafe { ns_string("NSScreenNumber") }; for i in 0..count { let screen: id = msg_send![screens, objectAtIndex: i]; let device_desc: id = msg_send![screen, deviceDescription]; diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index 92135a2c96e5cb4c3587f7f01225be5b1fcd8b43..8595582f4ad7e078f7cfb0140e249feb0a9740dc 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -8,6 +8,7 @@ use anyhow::anyhow; use cocoa::appkit::CGFloat; use collections::HashMap; use core_foundation::{ + array::{CFArray, CFArrayRef}, attributed_string::CFMutableAttributedString, base::{CFRange, TCFType}, number::CFNumber, @@ -21,8 +22,10 @@ use core_graphics::{ }; use core_text::{ font::CTFont, + font_collection::CTFontCollectionRef, font_descriptor::{ - kCTFontSlantTrait, kCTFontSymbolicTrait, kCTFontWeightTrait, kCTFontWidthTrait, + CTFontDescriptor, kCTFontSlantTrait, kCTFontSymbolicTrait, kCTFontWeightTrait, + kCTFontWidthTrait, }, line::CTLine, string_attributes::kCTFontAttributeName, @@ -97,7 +100,26 @@ impl PlatformTextSystem for MacTextSystem { fn all_font_names(&self) -> Vec { let mut names = Vec::new(); let collection = core_text::font_collection::create_for_all_families(); - let Some(descriptors) = collection.get_descriptors() else { + // NOTE: We intentionally avoid using `collection.get_descriptors()` here because + // it has a memory leak bug in core-text v21.0.0. The upstream code uses + // `wrap_under_get_rule` but `CTFontCollectionCreateMatchingFontDescriptors` + // follows the Create Rule (caller owns the result), so it should use + // `wrap_under_create_rule`. We call the function directly with correct memory management. 
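
The note above about `CTFontCollectionCreateMatchingFontDescriptors` comes down to Core Foundation's Create Rule: a `...Create...` function transfers ownership to the caller, so wrapping the result "under the get rule" (as a borrowed reference) leaks it. A toy model of that distinction, with `Box` standing in for a Core Foundation object:

```rust
struct Descriptor {
    family: String,
}

// Stand-in for a C "Create"-style function: the caller owns the result.
fn create_matching_descriptor(family: &str) -> *mut Descriptor {
    Box::into_raw(Box::new(Descriptor { family: family.to_owned() }))
}

fn main() {
    let raw = create_matching_descriptor("Menlo");
    // Correct ("create rule"): reclaim ownership exactly once, so the value
    // is freed when `owned` goes out of scope.
    let owned = unsafe { Box::from_raw(raw) };
    println!("{}", owned.family);
    // Leaky ("get rule"): only reading through `raw` and never calling
    // `Box::from_raw` would leave the allocation live forever.
}
```
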
+ unsafe extern "C" { + fn CTFontCollectionCreateMatchingFontDescriptors( + collection: CTFontCollectionRef, + ) -> CFArrayRef; + } + let descriptors: Option> = unsafe { + let array_ref = + CTFontCollectionCreateMatchingFontDescriptors(collection.as_concrete_TypeRef()); + if array_ref.is_null() { + None + } else { + Some(CFArray::wrap_under_create_rule(array_ref)) + } + }; + let Some(descriptors) = descriptors else { return names; }; for descriptor in descriptors.into_iter() { @@ -435,6 +457,7 @@ impl MacTextSystemState { { let mut text = text; + let mut break_ligature = true; for run in font_runs { let text_run; (text_run, text) = text.split_at(run.len); @@ -444,7 +467,8 @@ impl MacTextSystemState { string.replace_str(&CFString::new(text_run), CFRange::init(utf16_start, 0)); let utf16_end = string.char_len(); - let cf_range = CFRange::init(utf16_start, utf16_end - utf16_start); + let length = utf16_end - utf16_start; + let cf_range = CFRange::init(utf16_start, length); let font = &self.fonts[run.font_id.0]; let font_metrics = font.metrics(); @@ -452,6 +476,11 @@ impl MacTextSystemState { max_ascent = max_ascent.max(font_metrics.ascent * font_scale); max_descent = max_descent.max(-font_metrics.descent * font_scale); + let font_size = if break_ligature { + px(font_size.0.next_up()) + } else { + font_size + }; unsafe { string.set_attribute( cf_range, @@ -459,6 +488,7 @@ impl MacTextSystemState { &font.native_font().clone_with_font_size(font_size.into()), ); } + break_ligature = !break_ligature; } } // Retrieve the glyphs from the shaped line, converting UTF16 offsets to UTF8 offsets. diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 11ea4fb7e272c0660c7981cabf8f8c74ffa71830..14b0113c7cf44fa9574bfcca30b46fb988b5e380 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -153,6 +153,10 @@ unsafe fn build_classes() { sel!(mouseMoved:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(pressureChangeWithEvent:), + handle_view_event as extern "C" fn(&Object, Sel, id), + ); decl.add_method( sel!(mouseExited:), handle_view_event as extern "C" fn(&Object, Sel, id), @@ -781,7 +785,7 @@ impl MacWindow { native_window.setAcceptsMouseMovedEvents_(YES); if let Some(tabbing_identifier) = tabbing_identifier { - let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str()); + let tabbing_id = ns_string(tabbing_identifier.as_str()); let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id]; } else { let _: () = msg_send![native_window, setTabbingIdentifier:nil]; @@ -904,8 +908,8 @@ impl MacWindow { pub fn get_user_tabbing_preference() -> Option { unsafe { let defaults: id = NSUserDefaults::standardUserDefaults(); - let domain = NSString::alloc(nil).init_str("NSGlobalDomain"); - let key = NSString::alloc(nil).init_str("AppleWindowTabbingMode"); + let domain = ns_string("NSGlobalDomain"); + let key = ns_string("AppleWindowTabbingMode"); let dict: id = msg_send![defaults, persistentDomainForName: domain]; let value: id = if !dict.is_null() { @@ -1033,7 +1037,7 @@ impl PlatformWindow for MacWindow { } if let Some(tabbing_identifier) = tabbing_identifier { - let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str()); + let tabbing_id = ns_string(tabbing_identifier.as_str()); let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id]; } else { let _: () = msg_send![native_window, setTabbingIdentifier:nil]; @@ -1059,10 +1063,8 @@ impl 
PlatformWindow for MacWindow { return None; } let device_description: id = msg_send![screen, deviceDescription]; - let screen_number: id = NSDictionary::valueForKey_( - device_description, - NSString::alloc(nil).init_str("NSScreenNumber"), - ); + let screen_number: id = + NSDictionary::valueForKey_(device_description, ns_string("NSScreenNumber")); let screen_number: u32 = msg_send![screen_number, unsignedIntValue]; @@ -1188,6 +1190,7 @@ impl PlatformWindow for MacWindow { let (done_tx, done_rx) = oneshot::channel(); let done_tx = Cell::new(Some(done_tx)); let block = ConcreteBlock::new(move |answer: NSInteger| { + let _: () = msg_send![alert, release]; if let Some(done_tx) = done_tx.take() { let _ = done_tx.send(answer.try_into().unwrap()); } @@ -1505,8 +1508,8 @@ impl PlatformWindow for MacWindow { .spawn(async move { unsafe { let defaults: id = NSUserDefaults::standardUserDefaults(); - let domain = NSString::alloc(nil).init_str("NSGlobalDomain"); - let key = NSString::alloc(nil).init_str("AppleActionOnDoubleClick"); + let domain = ns_string("NSGlobalDomain"); + let key = ns_string("AppleActionOnDoubleClick"); let dict: id = msg_send![defaults, persistentDomainForName: domain]; let action: id = if !dict.is_null() { @@ -1543,6 +1546,17 @@ impl PlatformWindow for MacWindow { }) .detach(); } + + fn start_window_move(&self) { + let this = self.0.lock(); + let window = this.native_window; + + unsafe { + let app = NSApplication::sharedApplication(nil); + let mut event: id = msg_send![app, currentEvent]; + let _: () = msg_send![window, performWindowDragWithEvent: event]; + } + } } impl rwh::HasWindowHandle for MacWindow { @@ -1967,10 +1981,36 @@ extern "C" fn window_did_move(this: &Object, _: Sel, _: id) { } } +// Update the window scale factor and drawable size, and call the resize callback if any. 
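
The `get_user_tabbing_preference` and double-click lookups above follow the same shape: read a key from the `NSGlobalDomain` defaults dictionary if it exists, otherwise fall back to a default. A toy sketch of that pattern with a `HashMap` standing in for the defaults domain; the key names match the diff, while the fallback values here are purely illustrative:

```rust
use std::collections::HashMap;

fn read_preference(domain: Option<&HashMap<String, String>>, key: &str, default: &str) -> String {
    domain
        .and_then(|dict| dict.get(key))
        .cloned()
        .unwrap_or_else(|| default.to_string())
}

fn main() {
    let mut global_domain = HashMap::new();
    global_domain.insert("AppleWindowTabbingMode".to_string(), "always".to_string());

    // Key present: use the user's value.
    println!("{}", read_preference(Some(&global_domain), "AppleWindowTabbingMode", "fullscreen"));
    // Key missing: fall back to the (illustrative) default.
    println!("{}", read_preference(Some(&global_domain), "AppleActionOnDoubleClick", "Maximize"));
}
```
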
+fn update_window_scale_factor(window_state: &Arc>) { + let mut lock = window_state.as_ref().lock(); + let scale_factor = lock.scale_factor(); + let size = lock.content_size(); + let drawable_size = size.to_device_pixels(scale_factor); + unsafe { + let _: () = msg_send![ + lock.renderer.layer(), + setContentsScale: scale_factor as f64 + ]; + } + + lock.renderer.update_drawable_size(drawable_size); + + if let Some(mut callback) = lock.resize_callback.take() { + let content_size = lock.content_size(); + let scale_factor = lock.scale_factor(); + drop(lock); + callback(content_size, scale_factor); + window_state.as_ref().lock().resize_callback = Some(callback); + }; +} + extern "C" fn window_did_change_screen(this: &Object, _: Sel, _: id) { let window_state = unsafe { get_window_state(this) }; let mut lock = window_state.as_ref().lock(); lock.start_display_link(); + drop(lock); + update_window_scale_factor(&window_state); } extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id) { @@ -2079,27 +2119,7 @@ extern "C" fn make_backing_layer(this: &Object, _: Sel) -> id { extern "C" fn view_did_change_backing_properties(this: &Object, _: Sel) { let window_state = unsafe { get_window_state(this) }; - let mut lock = window_state.as_ref().lock(); - - let scale_factor = lock.scale_factor(); - let size = lock.content_size(); - let drawable_size = size.to_device_pixels(scale_factor); - unsafe { - let _: () = msg_send![ - lock.renderer.layer(), - setContentsScale: scale_factor as f64 - ]; - } - - lock.renderer.update_drawable_size(drawable_size); - - if let Some(mut callback) = lock.resize_callback.take() { - let content_size = lock.content_size(); - let scale_factor = lock.scale_factor(); - drop(lock); - callback(content_size, scale_factor); - window_state.as_ref().lock().resize_callback = Some(callback); - }; + update_window_scale_factor(&window_state); } extern "C" fn set_frame_size(this: &Object, _: Sel, size: NSSize) { @@ -2491,7 +2511,7 @@ where unsafe fn display_id_for_screen(screen: id) -> CGDirectDisplayID { unsafe { let device_description = NSScreen::deviceDescription(screen); - let screen_number_key: id = NSString::alloc(nil).init_str("NSScreenNumber"); + let screen_number_key: id = ns_string("NSScreenNumber"); let screen_number = device_description.objectForKey_(screen_number_key); let screen_number: NSUInteger = msg_send![screen_number, unsignedIntegerValue]; screen_number as CGDirectDisplayID @@ -2537,7 +2557,7 @@ unsafe fn remove_layer_background(layer: id) { // `description` reflects its name and some parameters. Currently `NSVisualEffectView` // uses a `CAFilter` named "colorSaturate". If one day they switch to `CIFilter`, the // `description` will still contain "Saturat" ("... inputSaturation = ..."). 
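
The refactored `update_window_scale_factor` above uses a pattern that recurs throughout this patch: take the callback out of the shared window state, drop the lock before invoking it (the callback may re-enter and lock the state itself), then put the callback back afterwards. A self-contained, std-only sketch of that pattern:

```rust
use std::sync::{Arc, Mutex};

struct WindowState {
    size: (u32, u32),
    resize_callback: Option<Box<dyn FnMut((u32, u32)) + Send>>,
}

fn notify_resize(state: &Arc<Mutex<WindowState>>) {
    let mut lock = state.lock().unwrap();
    if let Some(mut callback) = lock.resize_callback.take() {
        let size = lock.size;
        drop(lock); // release the lock so the callback can re-enter safely
        callback(size);
        state.lock().unwrap().resize_callback = Some(callback);
    }
}

fn main() {
    let state = Arc::new(Mutex::new(WindowState {
        size: (800, 600),
        resize_callback: None,
    }));
    let state_for_callback = Arc::clone(&state);
    state.lock().unwrap().resize_callback = Some(Box::new(move |size| {
        // Re-entering the lock here would deadlock if the caller still held it.
        let _current = state_for_callback.lock().unwrap().size;
        println!("resized to {:?}", size);
    }));
    notify_resize(&state);
}
```
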
- let test_string: id = NSString::alloc(nil).init_str("Saturat").autorelease(); + let test_string: id = ns_string("Saturat"); let count = NSArray::count(filters); for i in 0..count { let description: id = msg_send![filters.objectAtIndex(i), description]; diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index 017c29bfb558f77874a9729a52b518d9d41fb256..c271430586106abc93e0bb3258c9e25a06b12383 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -1,5 +1,4 @@ -use crate::{PlatformDispatcher, TaskLabel}; -use async_task::Runnable; +use crate::{PlatformDispatcher, Priority, RunnableVariant, TaskLabel}; use backtrace::Backtrace; use collections::{HashMap, HashSet, VecDeque}; use parking::Unparker; @@ -26,10 +25,10 @@ pub struct TestDispatcher { struct TestDispatcherState { random: StdRng, - foreground: HashMap>, - background: Vec, - deprioritized_background: Vec, - delayed: Vec<(Duration, Runnable)>, + foreground: HashMap>, + background: Vec, + deprioritized_background: Vec, + delayed: Vec<(Duration, RunnableVariant)>, start_time: Instant, time: Duration, is_main_thread: bool, @@ -39,7 +38,7 @@ struct TestDispatcherState { waiting_backtrace: Option, deprioritized_task_labels: HashSet, block_on_ticks: RangeInclusive, - last_parked: Option, + unparkers: Vec, } impl TestDispatcher { @@ -59,7 +58,7 @@ impl TestDispatcher { waiting_backtrace: None, deprioritized_task_labels: Default::default(), block_on_ticks: 0..=1000, - last_parked: None, + unparkers: Default::default(), }; TestDispatcher { @@ -175,7 +174,13 @@ impl TestDispatcher { let was_main_thread = state.is_main_thread; state.is_main_thread = main_thread; drop(state); - runnable.run(); + + // todo(localcc): add timings to tests + match runnable { + RunnableVariant::Meta(runnable) => runnable.run(), + RunnableVariant::Compat(runnable) => runnable.run(), + }; + self.state.lock().is_main_thread = was_main_thread; true @@ -240,20 +245,14 @@ impl TestDispatcher { let block_on_ticks = lock.block_on_ticks.clone(); lock.random.random_range(block_on_ticks) } - pub fn unpark_last(&self) { - self.state - .lock() - .last_parked - .take() - .as_ref() - .map(Unparker::unpark); + + pub fn unpark_all(&self) { + self.state.lock().unparkers.retain(|parker| parker.unpark()); } - pub fn set_unparker(&self, unparker: Unparker) { - let last = { self.state.lock().last_parked.replace(unparker) }; - if let Some(last) = last { - last.unpark(); - } + pub fn push_unparker(&self, unparker: Unparker) { + let mut state = self.state.lock(); + state.unparkers.push(unparker); } } @@ -268,6 +267,14 @@ impl Clone for TestDispatcher { } impl PlatformDispatcher for TestDispatcher { + fn get_all_timings(&self) -> Vec { + Vec::new() + } + + fn get_current_thread_timings(&self) -> Vec { + Vec::new() + } + fn is_main_thread(&self) -> bool { self.state.lock().is_main_thread } @@ -277,7 +284,7 @@ impl PlatformDispatcher for TestDispatcher { state.start_time + state.time } - fn dispatch(&self, runnable: Runnable, label: Option) { + fn dispatch(&self, runnable: RunnableVariant, label: Option, _priority: Priority) { { let mut state = self.state.lock(); if label.is_some_and(|label| state.deprioritized_task_labels.contains(&label)) { @@ -286,20 +293,20 @@ impl PlatformDispatcher for TestDispatcher { state.background.push(runnable); } } - self.unpark_last(); + self.unpark_all(); } - fn dispatch_on_main_thread(&self, runnable: Runnable) { + fn dispatch_on_main_thread(&self, runnable: 
RunnableVariant, _priority: Priority) { self.state .lock() .foreground .entry(self.id) .or_default() .push_back(runnable); - self.unpark_last(); + self.unpark_all(); } - fn dispatch_after(&self, duration: std::time::Duration, runnable: Runnable) { + fn dispatch_after(&self, duration: std::time::Duration, runnable: RunnableVariant) { let mut state = self.state.lock(); let next_time = state.time + duration; let ix = match state.delayed.binary_search_by_key(&next_time, |e| e.0) { @@ -311,4 +318,10 @@ impl PlatformDispatcher for TestDispatcher { fn as_test(&self) -> Option<&TestDispatcher> { Some(self) } + + fn spawn_realtime(&self, _priority: crate::RealtimePriority, f: Box) { + std::thread::spawn(move || { + f(); + }); + } } diff --git a/crates/gpui/src/platform/windows/clipboard.rs b/crates/gpui/src/platform/windows/clipboard.rs index 90d97a84c0bedcc241f7432a7f14f09d46018b49..2a5e8dcbbe2426674f7eae173231e2919590ad49 100644 --- a/crates/gpui/src/platform/windows/clipboard.rs +++ b/crates/gpui/src/platform/windows/clipboard.rs @@ -1,7 +1,7 @@ use std::sync::LazyLock; use anyhow::Result; -use collections::{FxHashMap, FxHashSet}; +use collections::FxHashMap; use itertools::Itertools; use windows::Win32::{ Foundation::{HANDLE, HGLOBAL}, @@ -18,7 +18,9 @@ use windows::Win32::{ }; use windows_core::PCWSTR; -use crate::{ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, hash}; +use crate::{ + ClipboardEntry, ClipboardItem, ClipboardString, ExternalPaths, Image, ImageFormat, hash, +}; // https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; @@ -48,16 +50,6 @@ static FORMATS_MAP: LazyLock> = LazyLock::ne formats_map.insert(CF_HDROP.0 as u32, ClipboardFormatType::Files); formats_map }); -static FORMATS_SET: LazyLock> = LazyLock::new(|| { - let mut formats_map = FxHashSet::default(); - formats_map.insert(CF_UNICODETEXT.0 as u32); - formats_map.insert(*CLIPBOARD_PNG_FORMAT); - formats_map.insert(*CLIPBOARD_GIF_FORMAT); - formats_map.insert(*CLIPBOARD_JPG_FORMAT); - formats_map.insert(*CLIPBOARD_SVG_FORMAT); - formats_map.insert(CF_HDROP.0 as u32); - formats_map -}); static IMAGE_FORMATS_MAP: LazyLock> = LazyLock::new(|| { let mut formats_map = FxHashMap::default(); formats_map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png); @@ -138,6 +130,11 @@ fn register_clipboard_format(format: PCWSTR) -> u32 { std::io::Error::last_os_error() ); } + log::debug!( + "Registered clipboard format {} as {}", + unsafe { format.display() }, + ret + ); ret } @@ -159,6 +156,7 @@ fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> { ClipboardEntry::Image(image) => { write_image_to_clipboard(image)?; } + ClipboardEntry::ExternalPaths(_) => {} }, None => { // Writing an empty list of entries just clears the clipboard. 
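
The test dispatcher's `dispatch_after` above keeps its delayed queue ordered by wake time and inserts with `binary_search_by_key`, treating both `Ok` and `Err` results as the insertion index. A minimal sketch of that sorted-insertion idiom:

```rust
use std::time::Duration;

fn insert_delayed(delayed: &mut Vec<(Duration, &'static str)>, at: Duration, task: &'static str) {
    // Ok(ix): an entry with the same wake time exists; Err(ix): the position
    // where this wake time would keep the vector sorted. Either way, insert
    // at ix.
    let ix = match delayed.binary_search_by_key(&at, |entry| entry.0) {
        Ok(ix) | Err(ix) => ix,
    };
    delayed.insert(ix, (at, task));
}

fn main() {
    let mut delayed = Vec::new();
    insert_delayed(&mut delayed, Duration::from_millis(30), "c");
    insert_delayed(&mut delayed, Duration::from_millis(10), "a");
    insert_delayed(&mut delayed, Duration::from_millis(20), "b");
    // The queue stays ordered by wake time: a, b, c.
    for (at, task) in &delayed {
        println!("{:?} -> {}", at, task);
    }
}
```
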
@@ -249,19 +247,33 @@ fn with_best_match_format(f: F) -> Option where F: Fn(u32) -> Option, { + let mut text = None; + let mut image = None; + let mut files = None; let count = unsafe { CountClipboardFormats() }; let mut clipboard_format = 0; for _ in 0..count { clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; - let Some(item_format) = FORMATS_SET.get(&clipboard_format) else { + let Some(item_format) = FORMATS_MAP.get(&clipboard_format) else { continue; }; - if let Some(entry) = f(*item_format) { - return Some(ClipboardItem { - entries: vec![entry], - }); + let bucket = match item_format { + ClipboardFormatType::Text if text.is_none() => &mut text, + ClipboardFormatType::Image if image.is_none() => &mut image, + ClipboardFormatType::Files if files.is_none() => &mut files, + _ => continue, + }; + if let Some(entry) = f(clipboard_format) { + *bucket = Some(entry); } } + + if let Some(entry) = [image, files, text].into_iter().flatten().next() { + return Some(ClipboardItem { + entries: vec![entry], + }); + } + // log the formats that we don't support yet. { clipboard_format = 0; @@ -346,18 +358,17 @@ fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option Option { - let text = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr, _size| { + let filenames = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr, _size| { let hdrop = HDROP(data_ptr); - let mut filenames = String::new(); + let mut filenames = Vec::new(); with_file_names(hdrop, |file_name| { - filenames.push_str(&file_name); + filenames.push(std::path::PathBuf::from(file_name)); }); filenames })?; - Some(ClipboardEntry::String(ClipboardString { - text, - metadata: None, - })) + Some(ClipboardEntry::ExternalPaths(ExternalPaths( + filenames.into(), + ))) } fn with_clipboard_data(format: u32, f: F) -> Option diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index cb22948898fd496d6820e29088a9be7c5c502341..22b8e6231aa0812b63a899cfa61c94a258700079 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -608,6 +608,7 @@ impl DirectWriteState { let mut first_run = true; let mut ascent = Pixels::default(); let mut descent = Pixels::default(); + let mut break_ligatures = false; for run in font_runs { if first_run { first_run = false; @@ -616,6 +617,7 @@ impl DirectWriteState { text_layout.GetLineMetrics(Some(&mut metrics), &mut line_count as _)?; ascent = px(metrics[0].baseline); descent = px(metrics[0].height - metrics[0].baseline); + break_ligatures = !break_ligatures; continue; } let font_info = &self.fonts[run.font_id.0]; @@ -636,10 +638,17 @@ impl DirectWriteState { text_layout.SetFontCollection(collection, text_range)?; text_layout .SetFontFamilyName(&HSTRING::from(&font_info.font_family), text_range)?; - text_layout.SetFontSize(font_size.0, text_range)?; + let font_size = if break_ligatures { + font_size.0.next_up() + } else { + font_size.0 + }; + text_layout.SetFontSize(font_size, text_range)?; text_layout.SetFontStyle(font_info.font_face.GetStyle(), text_range)?; text_layout.SetFontWeight(font_info.font_face.GetWeight(), text_range)?; text_layout.SetTypography(&font_info.features, text_range)?; + + break_ligatures = !break_ligatures; } let mut runs = Vec::new(); @@ -1506,7 +1515,7 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { id, position: point( px(context.width + glyph_offsets[this_glyph_idx].advanceOffset), - px(0.0), + 
px(-glyph_offsets[this_glyph_idx].ascenderOffset), ), index: context.index_converter.utf8_ix, is_emoji, diff --git a/crates/gpui/src/platform/windows/directx_atlas.rs b/crates/gpui/src/platform/windows/directx_atlas.rs index 38c22a41bf9d32cf43f585050390b75602a6bf42..9deae392d1a5ef18a6af644031f047780fa23f70 100644 --- a/crates/gpui/src/platform/windows/directx_atlas.rs +++ b/crates/gpui/src/platform/windows/directx_atlas.rs @@ -234,11 +234,14 @@ impl DirectXAtlasState { } fn texture(&self, id: AtlasTextureId) -> &DirectXAtlasTexture { - let textures = match id.kind { - crate::AtlasTextureKind::Monochrome => &self.monochrome_textures, - crate::AtlasTextureKind::Polychrome => &self.polychrome_textures, - }; - textures[id.index as usize].as_ref().unwrap() + match id.kind { + crate::AtlasTextureKind::Monochrome => &self.monochrome_textures[id.index as usize] + .as_ref() + .unwrap(), + crate::AtlasTextureKind::Polychrome => &self.polychrome_textures[id.index as usize] + .as_ref() + .unwrap(), + } } } diff --git a/crates/gpui/src/platform/windows/directx_renderer.rs b/crates/gpui/src/platform/windows/directx_renderer.rs index b4180708aa510158456e6b9b0fe1ba1e0dfea85b..608ac2c3b065c598547be8b79f8d7fae8070ff48 100644 --- a/crates/gpui/src/platform/windows/directx_renderer.rs +++ b/crates/gpui/src/platform/windows/directx_renderer.rs @@ -48,6 +48,12 @@ pub(crate) struct DirectXRenderer { width: u32, height: u32, + + /// Whether we want to skip drwaing due to device lost events. + /// + /// In that case we want to discard the first frame that we draw as we got reset in the middle of a frame + /// meaning we lost all the allocated gpu textures and scene resources. + skip_draws: bool, } /// Direct3D objects @@ -167,6 +173,7 @@ impl DirectXRenderer { font_info: Self::get_font_info(), width: 1, height: 1, + skip_draws: false, }) } @@ -192,8 +199,13 @@ impl DirectXRenderer { }], )?; unsafe { - device_context - .ClearRenderTargetView(resources.render_target_view.as_ref().unwrap(), &[0.0; 4]); + device_context.ClearRenderTargetView( + resources + .render_target_view + .as_ref() + .context("missing render target view")?, + &[0.0; 4], + ); device_context .OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None); device_context.RSSetViewports(Some(slice::from_ref(&resources.viewport))); @@ -283,10 +295,16 @@ impl DirectXRenderer { self.globals = globals; self.pipelines = pipelines; self.direct_composition = direct_composition; + self.skip_draws = true; Ok(()) } pub(crate) fn draw(&mut self, scene: &Scene) -> Result<()> { + if self.skip_draws { + // skip drawing this frame, we just recovered from a device lost event + // and so likely do not have the textures anymore that are required for drawing + return Ok(()); + } self.pre_draw()?; for batch in scene.batches() { match batch { @@ -306,14 +324,18 @@ impl DirectXRenderer { sprites, } => self.draw_polychrome_sprites(texture_id, sprites), PrimitiveBatch::Surfaces(surfaces) => self.draw_surfaces(surfaces), - }.context(format!("scene too large: {} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces", - scene.paths.len(), - scene.shadows.len(), - scene.quads.len(), - scene.underlines.len(), - scene.monochrome_sprites.len(), - scene.polychrome_sprites.len(), - scene.surfaces.len(),))?; + } + .context(format!( + "scene too large:\ + {} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces", + scene.paths.len(), + scene.shadows.len(), + scene.quads.len(), + scene.underlines.len(), + 
scene.monochrome_sprites.len(), + scene.polychrome_sprites.len(), + scene.surfaces.len(), + ))?; } self.present() } @@ -352,6 +374,7 @@ impl DirectXRenderer { } resources.recreate_resources(devices, width, height)?; + unsafe { devices .device_context @@ -647,6 +670,10 @@ impl DirectXRenderer { } }) } + + pub(crate) fn mark_drawable(&mut self) { + self.skip_draws = false; + } } impl DirectXResources { diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index f60d6bd884d7bfe6b313a7ca555067991172fe31..14486ccee9843ef9c0792d62f22fa825f0db43ee 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -1,16 +1,13 @@ use std::{ sync::atomic::{AtomicBool, Ordering}, thread::{ThreadId, current}, - time::Duration, + time::{Duration, Instant}, }; -use async_task::Runnable; use flume::Sender; use util::ResultExt; use windows::{ - System::Threading::{ - ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority, - }, + System::Threading::{ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler}, Win32::{ Foundation::{LPARAM, WPARAM}, UI::WindowsAndMessaging::PostMessageW, @@ -18,20 +15,21 @@ use windows::{ }; use crate::{ - HWND, PlatformDispatcher, SafeHwnd, TaskLabel, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, + GLOBAL_THREAD_TIMINGS, HWND, PlatformDispatcher, RunnableVariant, SafeHwnd, THREAD_TIMINGS, + TaskLabel, TaskTiming, ThreadTaskTimings, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, }; pub(crate) struct WindowsDispatcher { pub(crate) wake_posted: AtomicBool, - main_sender: Sender, + main_sender: Sender, main_thread_id: ThreadId, - platform_window_handle: SafeHwnd, + pub(crate) platform_window_handle: SafeHwnd, validation_number: usize, } impl WindowsDispatcher { pub(crate) fn new( - main_sender: Sender, + main_sender: Sender, platform_window_handle: HWND, validation_number: usize, ) -> Self { @@ -47,42 +45,120 @@ impl WindowsDispatcher { } } - fn dispatch_on_threadpool(&self, runnable: Runnable) { + fn dispatch_on_threadpool(&self, runnable: RunnableVariant) { let handler = { let mut task_wrapper = Some(runnable); WorkItemHandler::new(move |_| { - task_wrapper.take().unwrap().run(); + Self::execute_runnable(task_wrapper.take().unwrap()); Ok(()) }) }; - ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err(); + ThreadPool::RunAsync(&handler).log_err(); } - fn dispatch_on_threadpool_after(&self, runnable: Runnable, duration: Duration) { + fn dispatch_on_threadpool_after(&self, runnable: RunnableVariant, duration: Duration) { let handler = { let mut task_wrapper = Some(runnable); TimerElapsedHandler::new(move |_| { - task_wrapper.take().unwrap().run(); + Self::execute_runnable(task_wrapper.take().unwrap()); Ok(()) }) }; ThreadPoolTimer::CreateTimer(&handler, duration.into()).log_err(); } + + #[inline(always)] + pub(crate) fn execute_runnable(runnable: RunnableVariant) { + let start = Instant::now(); + + let mut timing = match runnable { + RunnableVariant::Meta(runnable) => { + let location = runnable.metadata().location; + let timing = TaskTiming { + location, + start, + end: None, + }; + Self::add_task_timing(timing); + + runnable.run(); + + timing + } + RunnableVariant::Compat(runnable) => { + let timing = TaskTiming { + location: core::panic::Location::caller(), + start, + end: None, + }; + Self::add_task_timing(timing); + + runnable.run(); + + timing + } + }; + + let end = Instant::now(); + timing.end = Some(end); + + 
Self::add_task_timing(timing); + } + + pub(crate) fn add_task_timing(timing: TaskTiming) { + THREAD_TIMINGS.with(|timings| { + let mut timings = timings.lock(); + let timings = &mut timings.timings; + + if let Some(last_timing) = timings.iter_mut().rev().next() { + if last_timing.location == timing.location { + last_timing.end = timing.end; + return; + } + } + + timings.push_back(timing); + }); + } } impl PlatformDispatcher for WindowsDispatcher { + fn get_all_timings(&self) -> Vec { + let global_thread_timings = GLOBAL_THREAD_TIMINGS.lock(); + ThreadTaskTimings::convert(&global_thread_timings) + } + + fn get_current_thread_timings(&self) -> Vec { + THREAD_TIMINGS.with(|timings| { + let timings = timings.lock(); + let timings = &timings.timings; + + let mut vec = Vec::with_capacity(timings.len()); + + let (s1, s2) = timings.as_slices(); + vec.extend_from_slice(s1); + vec.extend_from_slice(s2); + vec + }) + } + fn is_main_thread(&self) -> bool { current().id() == self.main_thread_id } - fn dispatch(&self, runnable: Runnable, label: Option) { + fn dispatch( + &self, + runnable: RunnableVariant, + label: Option, + _priority: gpui::Priority, + ) { self.dispatch_on_threadpool(runnable); if let Some(label) = label { log::debug!("TaskLabel: {label:?}"); } } - fn dispatch_on_main_thread(&self, runnable: Runnable) { + fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: gpui::Priority) { match self.main_sender.send(runnable) { Ok(_) => { if !self.wake_posted.swap(true, Ordering::AcqRel) { @@ -111,7 +187,12 @@ impl PlatformDispatcher for WindowsDispatcher { } } - fn dispatch_after(&self, duration: Duration, runnable: Runnable) { + fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { self.dispatch_on_threadpool_after(runnable, duration); } + + fn spawn_realtime(&self, _priority: crate::RealtimePriority, _f: Box) { + // disabled on windows for now. 
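
The `add_task_timing` bookkeeping above coalesces consecutive entries from the same source location: if the most recent timing came from the same location, its end time is updated instead of pushing a duplicate entry. A std-only sketch of that logic, with a `&'static str` standing in for `core::panic::Location`:

```rust
use std::collections::VecDeque;
use std::time::Instant;

#[derive(Clone, Copy)]
struct TaskTiming {
    location: &'static str, // stand-in for `core::panic::Location`
    start: Instant,
    end: Option<Instant>,
}

fn add_task_timing(timings: &mut VecDeque<TaskTiming>, timing: TaskTiming) {
    if let Some(last) = timings.back_mut() {
        if last.location == timing.location {
            // Same location as the previous entry: just extend it.
            last.end = timing.end;
            return;
        }
    }
    timings.push_back(timing);
}

fn main() {
    let mut timings = VecDeque::new();
    let start = Instant::now();
    add_task_timing(&mut timings, TaskTiming { location: "a.rs:1", start, end: None });
    add_task_timing(&mut timings, TaskTiming { location: "a.rs:1", start, end: Some(Instant::now()) });
    add_task_timing(&mut timings, TaskTiming { location: "b.rs:9", start, end: None });
    // Two entries: the first pair was coalesced.
    println!("{} entries, first started {:?} ago", timings.len(), timings[0].start.elapsed());
}
```
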
+ unimplemented!(); + } } diff --git a/crates/gpui/src/platform/windows/display.rs b/crates/gpui/src/platform/windows/display.rs index 79716c951da783f48e333a9a5dae85bd7bb34a67..720d459c1ce3b0251d8009dc2b77864727ed5441 100644 --- a/crates/gpui/src/platform/windows/display.rs +++ b/crates/gpui/src/platform/windows/display.rs @@ -23,6 +23,7 @@ pub(crate) struct WindowsDisplay { pub display_id: DisplayId, scale_factor: f32, bounds: Bounds, + visible_bounds: Bounds, physical_bounds: Bounds, uuid: Uuid, } @@ -36,6 +37,7 @@ impl WindowsDisplay { let screen = available_monitors().into_iter().nth(display_id.0 as _)?; let info = get_monitor_info(screen).log_err()?; let monitor_size = info.monitorInfo.rcMonitor; + let work_area = info.monitorInfo.rcWork; let uuid = generate_uuid(&info.szDevice); let scale_factor = get_scale_factor_for_monitor(screen).log_err()?; let physical_size = size( @@ -55,6 +57,14 @@ impl WindowsDisplay { ), size: physical_size.to_pixels(scale_factor), }, + visible_bounds: Bounds { + origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor), + size: size( + (work_area.right - work_area.left) as f32 / scale_factor, + (work_area.bottom - work_area.top) as f32 / scale_factor, + ) + .map(crate::px), + }, physical_bounds: Bounds { origin: point(monitor_size.left.into(), monitor_size.top.into()), size: physical_size, @@ -63,22 +73,22 @@ impl WindowsDisplay { }) } - pub fn new_with_handle(monitor: HMONITOR) -> Self { - let info = get_monitor_info(monitor).expect("unable to get monitor info"); + pub fn new_with_handle(monitor: HMONITOR) -> anyhow::Result { + let info = get_monitor_info(monitor)?; let monitor_size = info.monitorInfo.rcMonitor; + let work_area = info.monitorInfo.rcWork; let uuid = generate_uuid(&info.szDevice); let display_id = available_monitors() .iter() .position(|handle| handle.0 == monitor.0) .unwrap(); - let scale_factor = - get_scale_factor_for_monitor(monitor).expect("unable to get scale factor for monitor"); + let scale_factor = get_scale_factor_for_monitor(monitor)?; let physical_size = size( (monitor_size.right - monitor_size.left).into(), (monitor_size.bottom - monitor_size.top).into(), ); - WindowsDisplay { + Ok(WindowsDisplay { handle: monitor, display_id: DisplayId(display_id as _), scale_factor, @@ -90,26 +100,34 @@ impl WindowsDisplay { ), size: physical_size.to_pixels(scale_factor), }, + visible_bounds: Bounds { + origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor), + size: size( + (work_area.right - work_area.left) as f32 / scale_factor, + (work_area.bottom - work_area.top) as f32 / scale_factor, + ) + .map(crate::px), + }, physical_bounds: Bounds { origin: point(monitor_size.left.into(), monitor_size.top.into()), size: physical_size, }, uuid, - } + }) } - fn new_with_handle_and_id(handle: HMONITOR, display_id: DisplayId) -> Self { - let info = get_monitor_info(handle).expect("unable to get monitor info"); + fn new_with_handle_and_id(handle: HMONITOR, display_id: DisplayId) -> anyhow::Result { + let info = get_monitor_info(handle)?; let monitor_size = info.monitorInfo.rcMonitor; + let work_area = info.monitorInfo.rcWork; let uuid = generate_uuid(&info.szDevice); - let scale_factor = - get_scale_factor_for_monitor(handle).expect("unable to get scale factor for monitor"); + let scale_factor = get_scale_factor_for_monitor(handle)?; let physical_size = size( (monitor_size.right - monitor_size.left).into(), (monitor_size.bottom - monitor_size.top).into(), ); - WindowsDisplay { + Ok(WindowsDisplay { handle, 
display_id, scale_factor, @@ -121,12 +139,20 @@ impl WindowsDisplay { ), size: physical_size.to_pixels(scale_factor), }, + visible_bounds: Bounds { + origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor), + size: size( + (work_area.right - work_area.left) as f32 / scale_factor, + (work_area.bottom - work_area.top) as f32 / scale_factor, + ) + .map(crate::px), + }, physical_bounds: Bounds { origin: point(monitor_size.left.into(), monitor_size.top.into()), size: physical_size, }, uuid, - } + }) } pub fn primary_monitor() -> Option { @@ -140,7 +166,7 @@ impl WindowsDisplay { ); return None; } - Some(WindowsDisplay::new_with_handle(monitor)) + WindowsDisplay::new_with_handle(monitor).log_err() } /// Check if the center point of given bounds is inside this monitor @@ -154,7 +180,9 @@ impl WindowsDisplay { if monitor.is_invalid() { false } else { - let display = WindowsDisplay::new_with_handle(monitor); + let Ok(display) = WindowsDisplay::new_with_handle(monitor) else { + return false; + }; display.uuid == self.uuid } } @@ -163,11 +191,10 @@ impl WindowsDisplay { available_monitors() .into_iter() .enumerate() - .map(|(id, handle)| { - Rc::new(WindowsDisplay::new_with_handle_and_id( - handle, - DisplayId(id as _), - )) as Rc + .filter_map(|(id, handle)| { + Some(Rc::new( + WindowsDisplay::new_with_handle_and_id(handle, DisplayId(id as _)).ok()?, + ) as Rc) }) .collect() } @@ -194,6 +221,10 @@ impl PlatformDisplay for WindowsDisplay { fn bounds(&self) -> Bounds { self.bounds } + + fn visible_bounds(&self) -> Bounds { + self.visible_bounds + } } fn available_monitors() -> SmallVec<[HMONITOR; 4]> { diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 4e6df63106f4c650ad3130e39d410670ddc4687d..e6fa6006eb95ec45f1634cb72ef63e2f622455a7 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -51,7 +51,7 @@ impl WindowsWindowInner { WM_NCCALCSIZE => self.handle_calc_client_size(handle, wparam, lparam), WM_DPICHANGED => self.handle_dpi_changed_msg(handle, wparam, lparam), WM_DISPLAYCHANGE => self.handle_display_change_msg(handle), - WM_NCHITTEST => self.handle_hit_test_msg(handle, msg, wparam, lparam), + WM_NCHITTEST => self.handle_hit_test_msg(handle, lparam), WM_PAINT => self.handle_paint_msg(handle), WM_CLOSE => self.handle_close_msg(), WM_DESTROY => self.handle_destroy_msg(handle), @@ -116,17 +116,16 @@ impl WindowsWindowInner { } fn handle_move_msg(&self, handle: HWND, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); let origin = logical_point( lparam.signed_loword() as f32, lparam.signed_hiword() as f32, - lock.scale_factor, + self.state.scale_factor.get(), ); - lock.origin = origin; - let size = lock.logical_size; + self.state.origin.set(origin); + let size = self.state.logical_size.get(); let center_x = origin.x.0 + size.width.0 / 2.; let center_y = origin.y.0 + size.height.0 / 2.; - let monitor_bounds = lock.display.bounds(); + let monitor_bounds = self.state.display.get().bounds(); if center_x < monitor_bounds.left().0 || center_x > monitor_bounds.right().0 || center_y < monitor_bounds.top().0 @@ -136,42 +135,42 @@ impl WindowsWindowInner { let monitor = unsafe { MonitorFromWindow(handle, MONITOR_DEFAULTTONULL) }; // minimize the window can trigger this event too, in this case, // monitor is invalid, we do nothing. 
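
The new `visible_bounds` fields above derive logical bounds from the monitor's work area (`rcWork`, the desktop minus the taskbar) by dividing physical pixels by the scale factor. A small sketch of that conversion, with a plain `Rect` standing in for the Win32 `RECT` and assuming the origin is scaled the same way `logical_point` does:

```rust
#[derive(Clone, Copy)]
struct Rect {
    left: i32,
    top: i32,
    right: i32,
    bottom: i32,
}

struct LogicalBounds {
    origin: (f32, f32),
    size: (f32, f32),
}

fn visible_bounds(work_area: Rect, scale_factor: f32) -> LogicalBounds {
    LogicalBounds {
        origin: (
            work_area.left as f32 / scale_factor,
            work_area.top as f32 / scale_factor,
        ),
        size: (
            (work_area.right - work_area.left) as f32 / scale_factor,
            (work_area.bottom - work_area.top) as f32 / scale_factor,
        ),
    }
}

fn main() {
    // e.g. a 2560x1400 work area (taskbar excluded) on a 125% display.
    let bounds = visible_bounds(Rect { left: 0, top: 0, right: 2560, bottom: 1400 }, 1.25);
    println!("origin {:?}, size {:?}", bounds.origin, bounds.size);
}
```
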
- if !monitor.is_invalid() && lock.display.handle != monitor { + if !monitor.is_invalid() && self.state.display.get().handle != monitor { // we will get the same monitor if we only have one - lock.display = WindowsDisplay::new_with_handle(monitor); + self.state + .display + .set(WindowsDisplay::new_with_handle(monitor).log_err()?); } } - if let Some(mut callback) = lock.callbacks.moved.take() { - drop(lock); + if let Some(mut callback) = self.state.callbacks.moved.take() { callback(); - self.state.borrow_mut().callbacks.moved = Some(callback); + self.state.callbacks.moved.set(Some(callback)); } Some(0) } fn handle_get_min_max_info_msg(&self, lparam: LPARAM) -> Option { - let lock = self.state.borrow(); - let min_size = lock.min_size?; - let scale_factor = lock.scale_factor; - let boarder_offset = lock.border_offset; - drop(lock); + let min_size = self.state.min_size?; + let scale_factor = self.state.scale_factor.get(); + let boarder_offset = &self.state.border_offset; + unsafe { let minmax_info = &mut *(lparam.0 as *mut MINMAXINFO); minmax_info.ptMinTrackSize.x = - min_size.width.scale(scale_factor).0 as i32 + boarder_offset.width_offset; + min_size.width.scale(scale_factor).0 as i32 + boarder_offset.width_offset.get(); minmax_info.ptMinTrackSize.y = - min_size.height.scale(scale_factor).0 as i32 + boarder_offset.height_offset; + min_size.height.scale(scale_factor).0 as i32 + boarder_offset.height_offset.get(); } Some(0) } fn handle_size_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); - // Don't resize the renderer when the window is minimized, but record that it was minimized so // that on restore the swap chain can be recreated via `update_drawable_size_even_if_unchanged`. if wparam.0 == SIZE_MINIMIZED as usize { - lock.restore_from_minimized = lock.callbacks.request_frame.take(); + self.state + .restore_from_minimized + .set(self.state.callbacks.request_frame.take()); return Some(0); } @@ -179,14 +178,16 @@ impl WindowsWindowInner { let height = lparam.hiword().max(1) as i32; let new_size = size(DevicePixels(width), DevicePixels(height)); - let scale_factor = lock.scale_factor; + let scale_factor = self.state.scale_factor.get(); let mut should_resize_renderer = false; - if lock.restore_from_minimized.is_some() { - lock.callbacks.request_frame = lock.restore_from_minimized.take(); + if let Some(restore_from_minimized) = self.state.restore_from_minimized.take() { + self.state + .callbacks + .request_frame + .set(Some(restore_from_minimized)); } else { should_resize_renderer = true; } - drop(lock); self.handle_size_change(new_size, scale_factor, should_resize_renderer); Some(0) @@ -199,15 +200,19 @@ impl WindowsWindowInner { should_resize_renderer: bool, ) { let new_logical_size = device_size.to_pixels(scale_factor); - let mut lock = self.state.borrow_mut(); - lock.logical_size = new_logical_size; - if should_resize_renderer { - lock.renderer.resize(device_size).log_err(); + + self.state.logical_size.set(new_logical_size); + if should_resize_renderer + && let Err(e) = self.state.renderer.borrow_mut().resize(device_size) + { + log::error!("Failed to resize renderer, invalidating devices: {}", e); + self.state + .invalidate_devices + .store(true, std::sync::atomic::Ordering::Release); } - if let Some(mut callback) = lock.callbacks.resize.take() { - drop(lock); + if let Some(mut callback) = self.state.callbacks.resize.take() { callback(new_logical_size, scale_factor); - self.state.borrow_mut().callbacks.resize = Some(callback); + 
self.state.callbacks.resize.set(Some(callback)); } } @@ -239,7 +244,7 @@ impl WindowsWindowInner { fn handle_timer_msg(&self, handle: HWND, wparam: WPARAM) -> Option { if wparam.0 == SIZE_MOVE_LOOP_TIMER_ID { for runnable in self.main_receiver.drain() { - runnable.run(); + WindowsDispatcher::execute_runnable(runnable); } self.handle_paint_msg(handle) } else { @@ -252,17 +257,14 @@ impl WindowsWindowInner { } fn handle_close_msg(&self) -> Option { - let mut callback = self.state.borrow_mut().callbacks.should_close.take()?; + let mut callback = self.state.callbacks.should_close.take()?; let should_close = callback(); - self.state.borrow_mut().callbacks.should_close = Some(callback); + self.state.callbacks.should_close.set(Some(callback)); if should_close { None } else { Some(0) } } fn handle_destroy_msg(&self, handle: HWND) -> Option { - let callback = { - let mut lock = self.state.borrow_mut(); - lock.callbacks.close.take() - }; + let callback = { self.state.callbacks.close.take() }; if let Some(callback) = callback { callback(); } @@ -281,12 +283,10 @@ impl WindowsWindowInner { fn handle_mouse_move_msg(&self, handle: HWND, lparam: LPARAM, wparam: WPARAM) -> Option { self.start_tracking_mouse(handle, TME_LEAVE); - let mut lock = self.state.borrow_mut(); - let Some(mut func) = lock.callbacks.input.take() else { + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; - let scale_factor = lock.scale_factor; - drop(lock); + let scale_factor = self.state.scale_factor.get(); let pressed_button = match MODIFIERKEYS_FLAGS(wparam.loword() as u32) { flags if flags.contains(MK_LBUTTON) => Some(MouseButton::Left), @@ -308,32 +308,32 @@ impl WindowsWindowInner { modifiers: current_modifiers(), }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } fn handle_mouse_leave_msg(&self) -> Option { - let mut lock = self.state.borrow_mut(); - lock.hovered = false; - if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { - drop(lock); + self.state.hovered.set(false); + if let Some(mut callback) = self.state.callbacks.hovered_status_change.take() { callback(false); - self.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + self.state + .callbacks + .hovered_status_change + .set(Some(callback)); } Some(0) } fn handle_syskeyup_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); - let input = handle_key_event(wparam, lparam, &mut lock, |keystroke, _| { + let input = handle_key_event(wparam, lparam, &self.state, |keystroke, _| { PlatformInput::KeyUp(KeyUpEvent { keystroke }) })?; - let mut func = lock.callbacks.input.take()?; - drop(lock); + let mut func = self.state.callbacks.input.take()?; + func(input); - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); // Always return 0 to indicate that the message was handled, so we could properly handle `ModifiersChanged` event. Some(0) @@ -342,11 +342,10 @@ impl WindowsWindowInner { // It's a known bug that you can't trigger `ctrl-shift-0`. 
See: // https://superuser.com/questions/1455762/ctrl-shift-number-key-combination-has-stopped-working-for-a-few-numbers fn handle_keydown_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); let Some(input) = handle_key_event( wparam, lparam, - &mut lock, + &self.state, |keystroke, prefer_character_input| { PlatformInput::KeyDown(KeyDownEvent { keystroke, @@ -357,34 +356,31 @@ impl WindowsWindowInner { ) else { return Some(1); }; - drop(lock); - let Some(mut func) = self.state.borrow_mut().callbacks.input.take() else { + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } fn handle_keyup_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); - let Some(input) = handle_key_event(wparam, lparam, &mut lock, |keystroke, _| { + let Some(input) = handle_key_event(wparam, lparam, &self.state, |keystroke, _| { PlatformInput::KeyUp(KeyUpEvent { keystroke }) }) else { return Some(1); }; - let Some(mut func) = lock.callbacks.input.take() else { + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; - drop(lock); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } @@ -405,16 +401,15 @@ impl WindowsWindowInner { lparam: LPARAM, ) -> Option { unsafe { SetCapture(handle) }; - let mut lock = self.state.borrow_mut(); - let Some(mut func) = lock.callbacks.input.take() else { + + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; let x = lparam.signed_loword(); let y = lparam.signed_hiword(); let physical_point = point(DevicePixels(x as i32), DevicePixels(y as i32)); - let click_count = lock.click_state.update(button, physical_point); - let scale_factor = lock.scale_factor; - drop(lock); + let click_count = self.state.click_state.update(button, physical_point); + let scale_factor = self.state.scale_factor.get(); let input = PlatformInput::MouseDown(MouseDownEvent { button, @@ -424,7 +419,7 @@ impl WindowsWindowInner { first_mouse: false, }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } @@ -436,15 +431,14 @@ impl WindowsWindowInner { lparam: LPARAM, ) -> Option { unsafe { ReleaseCapture().log_err() }; - let mut lock = self.state.borrow_mut(); - let Some(mut func) = lock.callbacks.input.take() else { + + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; let x = lparam.signed_loword() as f32; let y = lparam.signed_hiword() as f32; - let click_count = lock.click_state.current_count; - let scale_factor = lock.scale_factor; - drop(lock); + let click_count = self.state.click_state.current_count.get(); + let scale_factor = self.state.scale_factor.get(); let input = PlatformInput::MouseUp(MouseUpEvent { button, @@ -453,7 +447,7 @@ impl WindowsWindowInner { click_count, }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } @@ -480,26 +474,23 @@ impl WindowsWindowInner { lparam: LPARAM, ) -> Option { let modifiers = current_modifiers(); - let mut 
lock = self.state.borrow_mut(); - let Some(mut func) = lock.callbacks.input.take() else { + + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; - let scale_factor = lock.scale_factor; + let scale_factor = self.state.scale_factor.get(); let wheel_scroll_amount = match modifiers.shift { - true => { - self.system_settings - .borrow() - .mouse_wheel_settings - .wheel_scroll_chars - } - false => { - self.system_settings - .borrow() - .mouse_wheel_settings - .wheel_scroll_lines - } + true => self + .system_settings() + .mouse_wheel_settings + .wheel_scroll_chars + .get(), + false => self + .system_settings() + .mouse_wheel_settings + .wheel_scroll_lines + .get(), }; - drop(lock); let wheel_distance = (wparam.signed_hiword() as f32 / WHEEL_DELTA as f32) * wheel_scroll_amount as f32; @@ -524,7 +515,7 @@ impl WindowsWindowInner { touch_phase: TouchPhase::Moved, }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } @@ -535,17 +526,15 @@ impl WindowsWindowInner { wparam: WPARAM, lparam: LPARAM, ) -> Option { - let mut lock = self.state.borrow_mut(); - let Some(mut func) = lock.callbacks.input.take() else { + let Some(mut func) = self.state.callbacks.input.take() else { return Some(1); }; - let scale_factor = lock.scale_factor; + let scale_factor = self.state.scale_factor.get(); let wheel_scroll_chars = self - .system_settings - .borrow() + .system_settings() .mouse_wheel_settings - .wheel_scroll_chars; - drop(lock); + .wheel_scroll_chars + .get(); let wheel_distance = (-wparam.signed_hiword() as f32 / WHEEL_DELTA as f32) * wheel_scroll_chars as f32; @@ -564,7 +553,7 @@ impl WindowsWindowInner { touch_phase: TouchPhase::Moved, }); let handled = !func(event).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { Some(1) } } @@ -658,11 +647,11 @@ impl WindowsWindowInner { wparam: WPARAM, lparam: LPARAM, ) -> Option { - if !self.hide_title_bar || self.state.borrow().is_fullscreen() || wparam.0 == 0 { + if !self.hide_title_bar || self.state.is_fullscreen() || wparam.0 == 0 { return None; } - let is_maximized = self.state.borrow().is_maximized(); + let is_maximized = self.state.is_maximized(); let insets = get_client_area_insets(handle, is_maximized, self.windows_version); // wparam is TRUE so lparam points to an NCCALCSIZE_PARAMS structure let mut params = lparam.0 as *mut NCCALCSIZE_PARAMS; @@ -677,8 +666,7 @@ impl WindowsWindowInner { // used by Chrome. However, it may result in one row of pixels being obscured // in our client area. But as Chrome says, "there seems to be no better solution." 
if is_maximized - && let Some(ref taskbar_position) = - self.system_settings.borrow().auto_hide_taskbar_position + && let Some(taskbar_position) = self.system_settings().auto_hide_taskbar_position.get() { // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, // so the window isn't treated as a "fullscreen app", which would cause @@ -707,11 +695,9 @@ impl WindowsWindowInner { let this = self.clone(); self.executor .spawn(async move { - let mut lock = this.state.borrow_mut(); - if let Some(mut func) = lock.callbacks.active_status_change.take() { - drop(lock); + if let Some(mut func) = this.state.callbacks.active_status_change.take() { func(activated); - this.state.borrow_mut().callbacks.active_status_change = Some(func); + this.state.callbacks.active_status_change.set(Some(func)); } }) .detach(); @@ -735,38 +721,64 @@ impl WindowsWindowInner { lparam: LPARAM, ) -> Option { let new_dpi = wparam.loword() as f32; - let mut lock = self.state.borrow_mut(); - let is_maximized = lock.is_maximized(); + + let is_maximized = self.state.is_maximized(); let new_scale_factor = new_dpi / USER_DEFAULT_SCREEN_DPI as f32; - lock.scale_factor = new_scale_factor; - lock.border_offset.update(handle).log_err(); - drop(lock); - - let rect = unsafe { &*(lparam.0 as *const RECT) }; - let width = rect.right - rect.left; - let height = rect.bottom - rect.top; - // this will emit `WM_SIZE` and `WM_MOVE` right here - // even before this function returns - // the new size is handled in `WM_SIZE` - unsafe { - SetWindowPos( - handle, - None, - rect.left, - rect.top, - width, - height, - SWP_NOZORDER | SWP_NOACTIVATE, - ) - .context("unable to set window position after dpi has changed") - .log_err(); - } + self.state.scale_factor.set(new_scale_factor); + self.state.border_offset.update(handle).log_err(); - // When maximized, SetWindowPos doesn't send WM_SIZE, so we need to manually - // update the size and call the resize callback if is_maximized { - let device_size = size(DevicePixels(width), DevicePixels(height)); - self.handle_size_change(device_size, new_scale_factor, true); + // Get the monitor and its work area at the new DPI + let monitor = unsafe { MonitorFromWindow(handle, MONITOR_DEFAULTTONEAREST) }; + let mut monitor_info: MONITORINFO = unsafe { std::mem::zeroed() }; + monitor_info.cbSize = std::mem::size_of::() as u32; + if unsafe { GetMonitorInfoW(monitor, &mut monitor_info) }.as_bool() { + let work_area = monitor_info.rcWork; + let width = work_area.right - work_area.left; + let height = work_area.bottom - work_area.top; + + // Update the window size to match the new monitor work area + // This will trigger WM_SIZE which will handle the size change + unsafe { + SetWindowPos( + handle, + None, + work_area.left, + work_area.top, + width, + height, + SWP_NOZORDER | SWP_NOACTIVATE | SWP_FRAMECHANGED, + ) + .context("unable to set maximized window position after dpi has changed") + .log_err(); + } + + // SetWindowPos may not send WM_SIZE for maximized windows in some cases, + // so we manually update the size to ensure proper rendering + let device_size = size(DevicePixels(width), DevicePixels(height)); + self.handle_size_change(device_size, new_scale_factor, true); + } + } else { + // For non-maximized windows, use the suggested RECT from the system + let rect = unsafe { &*(lparam.0 as *const RECT) }; + let width = rect.right - rect.left; + let height = rect.bottom - rect.top; + // this will emit `WM_SIZE` and `WM_MOVE` right here + // even before this function returns + // the new size is handled 
in `WM_SIZE` + unsafe { + SetWindowPos( + handle, + None, + rect.left, + rect.top, + width, + height, + SWP_NOZORDER | SWP_NOACTIVATE, + ) + .context("unable to set window position after dpi has changed") + .log_err(); + } } Some(0) @@ -787,7 +799,7 @@ impl WindowsWindowInner { // Because WM_DPICHANGED, WM_MOVE, WM_SIZE will come first, window reposition and resize // are handled there. // So we only care about if monitor is disconnected. - let previous_monitor = self.state.borrow().display; + let previous_monitor = self.state.display.get(); if WindowsDisplay::is_connected(previous_monitor.handle) { // we are fine, other display changed return None; @@ -804,87 +816,79 @@ impl WindowsWindowInner { log::error!("No monitor detected!"); return None; } - let new_display = WindowsDisplay::new_with_handle(new_monitor); - self.state.borrow_mut().display = new_display; + let new_display = WindowsDisplay::new_with_handle(new_monitor).log_err()?; + self.state.display.set(new_display); Some(0) } - fn handle_hit_test_msg( - &self, - handle: HWND, - msg: u32, - wparam: WPARAM, - lparam: LPARAM, - ) -> Option { - if !self.is_movable || self.state.borrow().is_fullscreen() { + fn handle_hit_test_msg(&self, handle: HWND, lparam: LPARAM) -> Option { + if !self.is_movable || self.state.is_fullscreen() { return None; } - let mut lock = self.state.borrow_mut(); - if let Some(mut callback) = lock.callbacks.hit_test_window_control.take() { - drop(lock); + let callback = self.state.callbacks.hit_test_window_control.take(); + let drag_area = if let Some(mut callback) = callback { let area = callback(); - self.state.borrow_mut().callbacks.hit_test_window_control = Some(callback); + self.state + .callbacks + .hit_test_window_control + .set(Some(callback)); if let Some(area) = area { - return match area { + match area { WindowControlArea::Drag => Some(HTCAPTION as _), - WindowControlArea::Close => Some(HTCLOSE as _), - WindowControlArea::Max => Some(HTMAXBUTTON as _), - WindowControlArea::Min => Some(HTMINBUTTON as _), - }; + WindowControlArea::Close => return Some(HTCLOSE as _), + WindowControlArea::Max => return Some(HTMAXBUTTON as _), + WindowControlArea::Min => return Some(HTMINBUTTON as _), + } + } else { + None } } else { - drop(lock); - } + None + }; if !self.hide_title_bar { // If the OS draws the title bar, we don't need to handle hit test messages. - return None; - } - - // default handler for resize areas - let hit = unsafe { DefWindowProcW(handle, msg, wparam, lparam) }; - if matches!( - hit.0 as u32, - HTNOWHERE - | HTRIGHT - | HTLEFT - | HTTOPLEFT - | HTTOP - | HTTOPRIGHT - | HTBOTTOMRIGHT - | HTBOTTOM - | HTBOTTOMLEFT - ) { - return Some(hit.0); - } - - if self.state.borrow().is_fullscreen() { - return Some(HTCLIENT as _); + return drag_area; } let dpi = unsafe { GetDpiForWindow(handle) }; - let frame_y = unsafe { GetSystemMetricsForDpi(SM_CYFRAME, dpi) }; - + // We do not use the OS title bar, so the default `DefWindowProcW` will only register a 1px edge for resizes + // We need to calculate the frame thickness ourselves and do the hit test manually. 
+ let frame_y = get_frame_thicknessx(dpi); + let frame_x = get_frame_thicknessy(dpi); let mut cursor_point = POINT { x: lparam.signed_loword().into(), y: lparam.signed_hiword().into(), }; + unsafe { ScreenToClient(handle, &mut cursor_point).ok().log_err() }; - if !self.state.borrow().is_maximized() && cursor_point.y >= 0 && cursor_point.y <= frame_y { - return Some(HTTOP as _); + if !self.state.is_maximized() && 0 <= cursor_point.y && cursor_point.y <= frame_y { + // x-axis actually goes from -frame_x to 0 + return Some(if cursor_point.x <= 0 { + HTTOPLEFT + } else { + let mut rect = Default::default(); + unsafe { GetWindowRect(handle, &mut rect) }.log_err(); + // right and bottom bounds of RECT are exclusive, thus `-1` + let right = rect.right - rect.left - 1; + // the bounds include the padding frames, so account for both of them + if right - 2 * frame_x <= cursor_point.x { + HTTOPRIGHT + } else { + HTTOP + } + } as _); } - Some(HTCLIENT as _) + drag_area } fn handle_nc_mouse_move_msg(&self, handle: HWND, lparam: LPARAM) -> Option<isize> { self.start_tracking_mouse(handle, TME_LEAVE | TME_NONCLIENT); - let mut lock = self.state.borrow_mut(); - let mut func = lock.callbacks.input.take()?; - let scale_factor = lock.scale_factor; - drop(lock); + let mut func = self.state.callbacks.input.take()?; + let scale_factor = self.state.scale_factor.get(); let mut cursor_point = POINT { x: lparam.signed_loword().into(), @@ -897,7 +901,7 @@ impl WindowsWindowInner { modifiers: current_modifiers(), }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { Some(0) } else { None } } @@ -909,17 +913,15 @@ impl WindowsWindowInner { wparam: WPARAM, lparam: LPARAM, ) -> Option<isize> { - let mut lock = self.state.borrow_mut(); - if let Some(mut func) = lock.callbacks.input.take() { - let scale_factor = lock.scale_factor; + if let Some(mut func) = self.state.callbacks.input.take() { + let scale_factor = self.state.scale_factor.get(); let mut cursor_point = POINT { x: lparam.signed_loword().into(), y: lparam.signed_hiword().into(), }; unsafe { ScreenToClient(handle, &mut cursor_point).ok().log_err() }; let physical_point = point(DevicePixels(cursor_point.x), DevicePixels(cursor_point.y)); - let click_count = lock.click_state.update(button, physical_point); - drop(lock); + let click_count = self.state.click_state.update(button, physical_point); let input = PlatformInput::MouseDown(MouseDownEvent { button, @@ -930,21 +932,20 @@ impl WindowsWindowInner { }); let result = func(input); let handled = !result.propagate || result.default_prevented; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { return Some(0); } } else { - drop(lock); }; // Since these are handled in handle_nc_mouse_up_msg we must prevent the default window proc if button == MouseButton::Left { match wparam.0 as u32 { - HTMINBUTTON => self.state.borrow_mut().nc_button_pressed = Some(HTMINBUTTON), - HTMAXBUTTON => self.state.borrow_mut().nc_button_pressed = Some(HTMAXBUTTON), - HTCLOSE => self.state.borrow_mut().nc_button_pressed = Some(HTCLOSE), + HTMINBUTTON => self.state.nc_button_pressed.set(Some(HTMINBUTTON)), + HTMAXBUTTON => self.state.nc_button_pressed.set(Some(HTMAXBUTTON)), + HTCLOSE => self.state.nc_button_pressed.set(Some(HTCLOSE)), _ => return None, }; Some(0) @@ -960,10 +961,8 @@ impl WindowsWindowInner { wparam: WPARAM, lparam: LPARAM, ) -> Option<isize> { - let mut lock =
self.state.borrow_mut(); - if let Some(mut func) = lock.callbacks.input.take() { - let scale_factor = lock.scale_factor; - drop(lock); + if let Some(mut func) = self.state.callbacks.input.take() { + let scale_factor = self.state.scale_factor.get(); let mut cursor_point = POINT { x: lparam.signed_loword().into(), @@ -977,16 +976,15 @@ click_count: 1, }); let handled = !func(input).propagate; - self.state.borrow_mut().callbacks.input = Some(func); + self.state.callbacks.input.set(Some(func)); if handled { return Some(0); } } else { - drop(lock); } - let last_pressed = self.state.borrow_mut().nc_button_pressed.take(); + let last_pressed = self.state.nc_button_pressed.take(); if button == MouseButton::Left && let Some(last_pressed) = last_pressed { @@ -996,7 +994,7 @@ true } (HTMAXBUTTON, HTMAXBUTTON) => { - if self.state.borrow().is_maximized() { + if self.state.is_maximized() { unsafe { ShowWindowAsync(handle, SW_NORMAL).ok().log_err() }; } else { unsafe { ShowWindowAsync(handle, SW_MAXIMIZE).ok().log_err() }; @@ -1021,17 +1019,16 @@ } fn handle_cursor_changed(&self, lparam: LPARAM) -> Option<isize> { - let mut state = self.state.borrow_mut(); - let had_cursor = state.current_cursor.is_some(); + let had_cursor = self.state.current_cursor.get().is_some(); - state.current_cursor = if lparam.0 == 0 { + self.state.current_cursor.set(if lparam.0 == 0 { None } else { Some(HCURSOR(lparam.0 as _)) - }; + }); - if had_cursor != state.current_cursor.is_some() { - unsafe { SetCursor(state.current_cursor) }; + if had_cursor != self.state.current_cursor.get().is_some() { + unsafe { SetCursor(self.state.current_cursor.get()) }; } Some(0) @@ -1054,9 +1051,9 @@ return None; } unsafe { - SetCursor(self.state.borrow().current_cursor); + SetCursor(self.state.current_cursor.get()); }; - Some(1) + Some(0) } fn handle_system_settings_changed( &self, handle: HWND, wparam: WPARAM, lparam: LPARAM, ) -> Option<isize> { if wparam.0 != 0 { - let mut lock = self.state.borrow_mut(); - let display = lock.display; - lock.click_state.system_update(wparam.0); - lock.border_offset.update(handle).log_err(); - // system settings may emit a window message which wants to take the refcell lock, so drop it - drop(lock); - self.system_settings.borrow_mut().update(display, wparam.0); + let display = self.state.display.get(); + self.state.click_state.system_update(wparam.0); + self.state.border_offset.update(handle).log_err(); + // system settings may emit a window message; state is Cell-based now, so no RefCell borrow is held across this call + + self.system_settings().update(display, wparam.0); } else { self.handle_system_theme_changed(handle, lparam)?; }; @@ -1095,13 +1091,13 @@ let new_appearance = system_appearance() .context("unable to get system appearance when handling ImmersiveColorSet") .log_err()?; - let mut lock = self.state.borrow_mut(); - if new_appearance != lock.appearance { - lock.appearance = new_appearance; - let mut callback = lock.callbacks.appearance_changed.take()?; - drop(lock); + + if new_appearance != self.state.appearance.get() { + self.state.appearance.set(new_appearance); + let mut callback = self.state.callbacks.appearance_changed.take()?; + callback(); - self.state.borrow_mut().callbacks.appearance_changed = Some(callback); + self.state.callbacks.appearance_changed.set(Some(callback)); configure_dwm_dark_mode(handle, new_appearance); } } @@ -1130,10 +1126,14 @@ } fn 
handle_device_lost(&self, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); let devices = lparam.0 as *const DirectXDevices; let devices = unsafe { &*devices }; - if let Err(err) = lock.renderer.handle_device_lost(&devices) { + if let Err(err) = self + .state + .renderer + .borrow_mut() + .handle_device_lost(&devices) + { panic!("Device lost: {err}"); } Some(0) @@ -1141,29 +1141,36 @@ impl WindowsWindowInner { #[inline] fn draw_window(&self, handle: HWND, force_render: bool) -> Option { - let mut request_frame = self.state.borrow_mut().callbacks.request_frame.take()?; + let mut request_frame = self.state.callbacks.request_frame.take()?; + + // we are instructing gpui to force render a frame, this will + // re-populate all the gpu textures for us so we can resume drawing in + // case we disabled drawing earlier due to a device loss + self.state.renderer.borrow_mut().mark_drawable(); request_frame(RequestFrameOptions { require_presentation: false, force_render, }); - self.state.borrow_mut().callbacks.request_frame = Some(request_frame); + + self.state.callbacks.request_frame.set(Some(request_frame)); unsafe { ValidateRect(Some(handle), None).ok().log_err() }; + Some(0) } #[inline] fn parse_char_message(&self, wparam: WPARAM) -> Option { let code_point = wparam.loword(); - let mut lock = self.state.borrow_mut(); + // https://www.unicode.org/versions/Unicode16.0.0/core-spec/chapter-3/#G2630 match code_point { 0xD800..=0xDBFF => { // High surrogate, wait for low surrogate - lock.pending_surrogate = Some(code_point); + self.state.pending_surrogate.set(Some(code_point)); None } 0xDC00..=0xDFFF => { - if let Some(high_surrogate) = lock.pending_surrogate.take() { + if let Some(high_surrogate) = self.state.pending_surrogate.take() { // Low surrogate, combine with pending high surrogate String::from_utf16(&[high_surrogate, code_point]).ok() } else { @@ -1175,7 +1182,7 @@ impl WindowsWindowInner { } } _ => { - lock.pending_surrogate = None; + self.state.pending_surrogate.set(None); char::from_u32(code_point as u32) .filter(|c| !c.is_control()) .map(|c| c.to_string()) @@ -1184,9 +1191,8 @@ impl WindowsWindowInner { } fn start_tracking_mouse(&self, handle: HWND, flags: TRACKMOUSEEVENT_FLAGS) { - let mut lock = self.state.borrow_mut(); - if !lock.hovered { - lock.hovered = true; + if !self.state.hovered.get() { + self.state.hovered.set(true); unsafe { TrackMouseEvent(&mut TRACKMOUSEEVENT { cbSize: std::mem::size_of::() as u32, @@ -1196,10 +1202,12 @@ impl WindowsWindowInner { }) .log_err() }; - if let Some(mut callback) = lock.callbacks.hovered_status_change.take() { - drop(lock); + if let Some(mut callback) = self.state.callbacks.hovered_status_change.take() { callback(true); - self.state.borrow_mut().callbacks.hovered_status_change = Some(callback); + self.state + .callbacks + .hovered_status_change + .set(Some(callback)); } } } @@ -1208,9 +1216,9 @@ impl WindowsWindowInner { where F: FnOnce(&mut PlatformInputHandler) -> R, { - let mut input_handler = self.state.borrow_mut().input_handler.take()?; + let mut input_handler = self.state.input_handler.take()?; let result = f(&mut input_handler); - self.state.borrow_mut().input_handler = Some(input_handler); + self.state.input_handler.set(Some(input_handler)); Some(result) } @@ -1218,12 +1226,11 @@ impl WindowsWindowInner { where F: FnOnce(&mut PlatformInputHandler, f32) -> Option, { - let mut lock = self.state.borrow_mut(); - let mut input_handler = lock.input_handler.take()?; - let scale_factor = lock.scale_factor; - drop(lock); + let 
mut input_handler = self.state.input_handler.take()?; + let scale_factor = self.state.scale_factor.get(); + let result = f(&mut input_handler, scale_factor); - self.state.borrow_mut().input_handler = Some(input_handler); + self.state.input_handler.set(Some(input_handler)); result } } @@ -1231,7 +1238,7 @@ impl WindowsWindowInner { fn handle_key_event( wparam: WPARAM, lparam: LPARAM, - state: &mut WindowsWindowState, + state: &WindowsWindowState, f: F, ) -> Option where @@ -1244,11 +1251,12 @@ where VK_SHIFT | VK_CONTROL | VK_MENU | VK_LMENU | VK_RMENU | VK_LWIN | VK_RWIN => { if state .last_reported_modifiers + .get() .is_some_and(|prev_modifiers| prev_modifiers == modifiers) { return None; } - state.last_reported_modifiers = Some(modifiers); + state.last_reported_modifiers.set(Some(modifiers)); Some(PlatformInput::ModifiersChanged(ModifiersChangedEvent { modifiers, capslock: current_capslock(), @@ -1259,11 +1267,12 @@ where let capslock = current_capslock(); if state .last_reported_capslock + .get() .is_some_and(|prev_capslock| prev_capslock == capslock) { return None; } - state.last_reported_capslock = Some(capslock); + state.last_reported_capslock.set(Some(capslock)); Some(PlatformInput::ModifiersChanged(ModifiersChangedEvent { modifiers, capslock, @@ -1495,7 +1504,7 @@ fn get_client_area_insets( // The top inset is calculated using an empirical formula that I derived through various // tests. Without this, the top 1-2 rows of pixels in our window would be obscured. let dpi = unsafe { GetDpiForWindow(handle) }; - let frame_thickness = get_frame_thickness(dpi); + let frame_thickness = get_frame_thicknessx(dpi); let top_insets = if is_maximized { frame_thickness } else { @@ -1516,12 +1525,18 @@ fn get_client_area_insets( // borders on Windows: // - SM_CXSIZEFRAME: The resize handle. // - SM_CXPADDEDBORDER: Additional border space that isn't part of the resize handle. 
-fn get_frame_thickness(dpi: u32) -> i32 { +fn get_frame_thicknessx(dpi: u32) -> i32 { let resize_frame_thickness = unsafe { GetSystemMetricsForDpi(SM_CXSIZEFRAME, dpi) }; let padding_thickness = unsafe { GetSystemMetricsForDpi(SM_CXPADDEDBORDER, dpi) }; resize_frame_thickness + padding_thickness } +fn get_frame_thicknessy(dpi: u32) -> i32 { + let resize_frame_thickness = unsafe { GetSystemMetricsForDpi(SM_CYSIZEFRAME, dpi) }; + let padding_thickness = unsafe { GetSystemMetricsForDpi(SM_CXPADDEDBORDER, dpi) }; + resize_frame_thickness + padding_thickness +} + fn notify_frame_changed(handle: HWND) { unsafe { SetWindowPos( diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index b985cc14b01b1171d4013bf5c41a0c5199565503..af0cb89ecc94da70cc42c8d4c397aeb2a811d6fb 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -1,14 +1,16 @@ use std::{ - cell::RefCell, + cell::{Cell, RefCell}, ffi::OsStr, path::{Path, PathBuf}, rc::{Rc, Weak}, - sync::{Arc, atomic::Ordering}, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, }; use ::util::{ResultExt, paths::SanitizedPath}; use anyhow::{Context as _, Result, anyhow}; -use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use itertools::Itertools; use parking_lot::RwLock; @@ -37,37 +39,40 @@ pub(crate) struct WindowsPlatform { text_system: Arc, windows_version: WindowsVersion, drop_target_helper: IDropTargetHelper, + /// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices + /// as resizing them has failed, causing us to have lost at least the render target. + invalidate_devices: Arc, handle: HWND, disable_direct_composition: bool, } struct WindowsPlatformInner { - state: RefCell, + state: WindowsPlatformState, raw_window_handles: std::sync::Weak>>, // The below members will never change throughout the entire lifecycle of the app. validation_number: usize, - main_receiver: flume::Receiver, + main_receiver: flume::Receiver, dispatcher: Arc, } pub(crate) struct WindowsPlatformState { callbacks: PlatformCallbacks, - menus: Vec, - jump_list: JumpList, + menus: RefCell>, + jump_list: RefCell, // NOTE: standard cursor handles don't need to close. 
- pub(crate) current_cursor: Option, - directx_devices: Option, + pub(crate) current_cursor: Cell>, + directx_devices: RefCell>, } #[derive(Default)] struct PlatformCallbacks { - open_urls: Option)>>, - quit: Option>, - reopen: Option>, - app_menu_action: Option>, - will_open_app_menu: Option>, - validate_app_menu_command: Option bool>>, - keyboard_layout_change: Option>, + open_urls: Cell)>>>, + quit: Cell>>, + reopen: Cell>>, + app_menu_action: Cell>>, + will_open_app_menu: Cell>>, + validate_app_menu_command: Cell bool>>>, + keyboard_layout_change: Cell>>, } impl WindowsPlatformState { @@ -79,10 +84,10 @@ impl WindowsPlatformState { Self { callbacks, - jump_list, - current_cursor, - directx_devices, - menus: Vec::new(), + jump_list: RefCell::new(jump_list), + current_cursor: Cell::new(current_cursor), + directx_devices: RefCell::new(directx_devices), + menus: RefCell::new(Vec::new()), } } } @@ -93,7 +98,7 @@ impl WindowsPlatform { OleInitialize(None).context("unable to initialize Windows OLE")?; } let directx_devices = DirectXDevices::new().context("Creating DirectX devices")?; - let (main_sender, main_receiver) = flume::unbounded::(); + let (main_sender, main_receiver) = flume::unbounded::(); let validation_number = if usize::BITS == 64 { rand::random::() as usize } else { @@ -163,6 +168,7 @@ impl WindowsPlatform { disable_direct_composition, windows_version, drop_target_helper, + invalidate_devices: Arc::new(AtomicBool::new(false)), }) } @@ -188,14 +194,15 @@ impl WindowsPlatform { WindowCreationInfo { icon: self.icon, executor: self.foreground_executor.clone(), - current_cursor: self.inner.state.borrow().current_cursor, + current_cursor: self.inner.state.current_cursor.get(), windows_version: self.windows_version, drop_target_helper: self.drop_target_helper.clone(), validation_number: self.inner.validation_number, main_receiver: self.inner.main_receiver.clone(), platform_window_handle: self.handle, disable_direct_composition: self.disable_direct_composition, - directx_devices: self.inner.state.borrow().directx_devices.clone().unwrap(), + directx_devices: self.inner.state.directx_devices.borrow().clone().unwrap(), + invalidate_devices: self.invalidate_devices.clone(), } } @@ -206,9 +213,8 @@ impl WindowsPlatform { actions.push(dock_menu); } }); - let mut lock = self.inner.state.borrow_mut(); - lock.jump_list.dock_menus = actions; - update_jump_list(&lock.jump_list).log_err(); + self.inner.state.jump_list.borrow_mut().dock_menus = actions; + update_jump_list(&self.inner.state.jump_list.borrow()).log_err(); } fn update_jump_list( @@ -222,12 +228,10 @@ impl WindowsPlatform { actions.push(dock_menu); } }); - let mut lock = self.inner.state.borrow_mut(); - lock.jump_list.dock_menus = actions; - lock.jump_list.recent_workspaces = entries; - update_jump_list(&lock.jump_list) - .log_err() - .unwrap_or_default() + let mut jump_list = self.inner.state.jump_list.borrow_mut(); + jump_list.dock_menus = actions; + jump_list.recent_workspaces = entries; + update_jump_list(&jump_list).log_err().unwrap_or_default() } fn find_current_active_window(&self) -> Option { @@ -243,18 +247,22 @@ impl WindowsPlatform { } fn begin_vsync_thread(&self) { - let mut directx_device = self.inner.state.borrow().directx_devices.clone().unwrap(); + let mut directx_device = self.inner.state.directx_devices.borrow().clone().unwrap(); let platform_window: SafeHwnd = self.handle.into(); let validation_number = self.inner.validation_number; let all_windows = Arc::downgrade(&self.raw_window_handles); let text_system = 
Arc::downgrade(&self.text_system); + let invalidate_devices = self.invalidate_devices.clone(); + std::thread::Builder::new() .name("VSyncProvider".to_owned()) .spawn(move || { let vsync_provider = VSyncProvider::new(); loop { vsync_provider.wait_for_vsync(); - if check_device_lost(&directx_device.device) { + if check_device_lost(&directx_device.device) + || invalidate_devices.fetch_and(false, Ordering::Acquire) + { if let Err(err) = handle_gpu_device_lost( &mut directx_device, platform_window.as_raw(), @@ -323,9 +331,9 @@ impl Platform for WindowsPlatform { fn on_keyboard_layout_change(&self, callback: Box) { self.inner .state - .borrow_mut() .callbacks - .keyboard_layout_change = Some(callback); + .keyboard_layout_change + .set(Some(callback)); } fn run(&self, on_finish_launching: Box) { @@ -342,9 +350,8 @@ impl Platform for WindowsPlatform { } } - if let Some(ref mut callback) = self.inner.state.borrow_mut().callbacks.quit { - callback(); - } + self.inner + .with_callback(|callbacks| &callbacks.quit, |callback| callback()); } fn quit(&self) { @@ -379,11 +386,12 @@ impl Platform for WindowsPlatform { #[allow( clippy::disallowed_methods, reason = "We are restarting ourselves, using std command thus is fine" - )] - let restart_process = util::command::new_std_command("powershell.exe") - .arg("-command") - .arg(script) - .spawn(); + )] // todo(shell): There might be no powershell on the system + let restart_process = + util::command::new_std_command(util::shell::get_windows_system_shell()) + .arg("-command") + .arg(script) + .spawn(); match restart_process { Ok(_) => self.quit(), @@ -462,7 +470,7 @@ impl Platform for WindowsPlatform { } fn on_open_urls(&self, callback: Box)>) { - self.inner.state.borrow_mut().callbacks.open_urls = Some(callback); + self.inner.state.callbacks.open_urls.set(Some(callback)); } fn prompt_for_paths( @@ -532,19 +540,19 @@ impl Platform for WindowsPlatform { } fn on_quit(&self, callback: Box) { - self.inner.state.borrow_mut().callbacks.quit = Some(callback); + self.inner.state.callbacks.quit.set(Some(callback)); } fn on_reopen(&self, callback: Box) { - self.inner.state.borrow_mut().callbacks.reopen = Some(callback); + self.inner.state.callbacks.reopen.set(Some(callback)); } fn set_menus(&self, menus: Vec